diff --git a/testbed/django__django/.editorconfig b/testbed/django__django/.editorconfig new file mode 100644 index 0000000000000000000000000000000000000000..e43144c4c815ed5f8b10fb3e84d9870aaf8c1f45 --- /dev/null +++ b/testbed/django__django/.editorconfig @@ -0,0 +1,47 @@ +# https://editorconfig.org/ + +root = true + +[*] +indent_style = space +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true +end_of_line = lf +charset = utf-8 + +# Docstrings and comments use max_line_length = 79 +[*.py] +max_line_length = 88 + +# Use 2 spaces for the HTML files +[*.html] +indent_size = 2 + +# The JSON files contain newlines inconsistently +[*.json] +indent_size = 2 +insert_final_newline = ignore + +[**/admin/js/vendor/**] +indent_style = ignore +indent_size = ignore + +# Minified JavaScript files shouldn't be changed +[**.min.js] +indent_style = ignore +insert_final_newline = ignore + +# Makefiles always use tabs for indentation +[Makefile] +indent_style = tab + +# Batch files use tabs for indentation +[*.bat] +indent_style = tab + +[docs/**.txt] +max_line_length = 79 + +[*.yml] +indent_size = 2 diff --git a/testbed/django__django/.eslintignore b/testbed/django__django/.eslintignore new file mode 100644 index 0000000000000000000000000000000000000000..6e4edbd66d62223e1e7f032afc6f37c81f28fad0 --- /dev/null +++ b/testbed/django__django/.eslintignore @@ -0,0 +1,7 @@ +**/*.min.js +**/vendor/**/*.js +django/contrib/gis/templates/**/*.js +django/views/templates/*.js +docs/_build/**/*.js +node_modules/**.js +tests/**/*.js diff --git a/testbed/django__django/.eslintrc b/testbed/django__django/.eslintrc new file mode 100644 index 0000000000000000000000000000000000000000..332755a844b0ac8d8fd50e5b2b58d66e47561118 --- /dev/null +++ b/testbed/django__django/.eslintrc @@ -0,0 +1,37 @@ +{ + "rules": { + "camelcase": ["off", {"properties": "always"}], + "comma-spacing": ["error", {"before": false, "after": true}], + "curly": ["error", "all"], + "dot-notation": 
["error", {"allowKeywords": true}], + "eqeqeq": ["error"], + "indent": ["error", 4], + "key-spacing": ["error", {"beforeColon": false, "afterColon": true}], + "linebreak-style": ["error", "unix"], + "new-cap": ["off", {"newIsCap": true, "capIsNew": true}], + "no-alert": ["off"], + "no-eval": ["error"], + "no-extend-native": ["error", {"exceptions": ["Date", "String"]}], + "no-multi-spaces": ["error"], + "no-octal-escape": ["error"], + "no-script-url": ["error"], + "no-shadow": ["error", {"hoist": "functions"}], + "no-underscore-dangle": ["error"], + "no-unused-vars": ["error", {"vars": "local", "args": "none"}], + "no-var": ["error"], + "prefer-const": ["error"], + "quotes": ["off", "single"], + "semi": ["error", "always"], + "space-before-blocks": ["error", "always"], + "space-before-function-paren": ["error", {"anonymous": "never", "named": "never"}], + "space-infix-ops": ["error", {"int32Hint": false}], + "strict": ["error", "global"] + }, + "env": { + "browser": true, + "es6": true + }, + "globals": { + "django": false + } +} diff --git a/testbed/django__django/.git-blame-ignore-revs b/testbed/django__django/.git-blame-ignore-revs new file mode 100644 index 0000000000000000000000000000000000000000..bf8b701ed23c346c42e27860c8bae1f51107f347 --- /dev/null +++ b/testbed/django__django/.git-blame-ignore-revs @@ -0,0 +1,9 @@ +ca88caa1031c0de545d82de8d90dcae0e03651fb +c5cd8783825b5f6384417dac5f3889b4210b7d08 +9c19aff7c7561e3a82978a272ecdaad40dda5c00 +7119f40c9881666b6f9b5cf7df09ee1d21cc8344 +c18861804feb6a97afbeabb51be748dd60a04458 +097e3a70c1481ee7b042b2edd91b2be86fb7b5b6 +534ac4829764f317cf2fbc4a18354fcc998c1425 +ba755ca13123d2691a0926ddb64e5d0a2906a880 +14459f80ee3a9e005989db37c26fd13bb6d2fab2 diff --git a/testbed/django__django/.gitattributes b/testbed/django__django/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..170bbc2b548b62a97a79f56adcad7ff7f4992c3e --- /dev/null +++ b/testbed/django__django/.gitattributes @@ -0,0 +1,7 @@ 
+# Normalize line endings to avoid spurious failures in the core test suite on Windows. +*html text eol=lf +*css text eol=lf +*js text eol=lf +tests/staticfiles_tests/apps/test/static/test/*txt text eol=lf +tests/staticfiles_tests/project/documents/test/*txt text eol=lf +docs/releases/*.txt merge=union diff --git a/testbed/django__django/.gitignore b/testbed/django__django/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6a67c6eb289c0f3e7f28e54801ecbe4554d60ef2 --- /dev/null +++ b/testbed/django__django/.gitignore @@ -0,0 +1,18 @@ +# If you need to exclude files such as those generated by an IDE, use +# $GIT_DIR/info/exclude or the core.excludesFile configuration variable as +# described in https://git-scm.com/docs/gitignore + +*.egg-info +*.pot +*.py[co] +.tox/ +__pycache__ +MANIFEST +dist/ +docs/_build/ +docs/locale/ +node_modules/ +tests/coverage_html/ +tests/.coverage* +build/ +tests/report/ diff --git a/testbed/django__django/.pre-commit-config.yaml b/testbed/django__django/.pre-commit-config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5d0dd849793ebcfc57d371ea6b16576a216732a4 --- /dev/null +++ b/testbed/django__django/.pre-commit-config.yaml @@ -0,0 +1,24 @@ +repos: + - repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + exclude: \.py-tpl$ + - repo: https://github.com/adamchainz/blacken-docs + rev: 1.13.0 + hooks: + - id: blacken-docs + additional_dependencies: + - black==23.7.0 + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + - repo: https://github.com/PyCQA/flake8 + rev: 6.0.0 + hooks: + - id: flake8 + - repo: https://github.com/pre-commit/mirrors-eslint + rev: v8.44.0 + hooks: + - id: eslint diff --git a/testbed/django__django/.readthedocs.yml b/testbed/django__django/.readthedocs.yml new file mode 100644 index 0000000000000000000000000000000000000000..bde8b64da0f0961142a6c4ade9dcf1dca96cc6a9 --- /dev/null +++ 
b/testbed/django__django/.readthedocs.yml @@ -0,0 +1,18 @@ +# Configuration for the Read The Docs (RTD) builds of the documentation. +# Ref: https://docs.readthedocs.io/en/stable/config-file/v2.html +# The python.install.requirements pins the version of Sphinx used. +version: 2 + +build: + os: ubuntu-20.04 + tools: + python: "3.8" + +sphinx: + configuration: docs/conf.py + +python: + install: + - requirements: docs/requirements.txt + +formats: all diff --git a/testbed/django__django/AUTHORS b/testbed/django__django/AUTHORS new file mode 100644 index 0000000000000000000000000000000000000000..40df3589c024df075dcf62425f5ceb78f31d1253 --- /dev/null +++ b/testbed/django__django/AUTHORS @@ -0,0 +1,1073 @@ +Django was originally created in late 2003 at World Online, the web division +of the Lawrence Journal-World newspaper in Lawrence, Kansas. + +Here is an inevitably incomplete list of MUCH-APPRECIATED CONTRIBUTORS -- +people who have submitted patches, reported bugs, added translations, helped +answer newbie questions, and generally made Django that much better: + + Aaron Cannon + Aaron Swartz + Aaron T. 
Myers + Abeer Upadhyay + Abhijeet Viswa + Abhinav Patil + Abhinav Yadav + Abhishek Gautam + Abhyudai + Adam Allred + Adam Bogdał + Adam Donaghy + Adam Johnson + Adam Malinowski + Adam Vandenberg + Ade Lee + Adiyat Mubarak + Adnan Umer + Arslan Noor + Adrian Holovaty + Adrian Torres + Adrien Lemaire + Afonso Fernández Nogueira + AgarFu + Ahmad Alhashemi + Ahmad Al-Ibrahim + Ahmed Eltawela + ajs + Akash Agrawal + Akash Kumar Sen + Akis Kesoglou + Aksel Ethem + Akshesh Doshi + alang@bright-green.com + Alasdair Nicol + Albert Wang + Alcides Fonseca + Aldian Fazrihady + Aleksandra Sendecka + Aleksi Häkli + Alex Dutton + Alexander Myodov + Alexandr Tatarinov + Alex Aktsipetrov + Alex Becker + Alex Couper + Alex Dedul + Alex Gaynor + Alex Hill + Alex Ogier + Alex Robbins + Alexey Boriskin + Alexey Tsivunin + Ali Vakilzade + Aljaž Košir + Aljosa Mohorovic + Alokik Vijay + Amit Chakradeo + Amit Ramon + Amit Upadhyay + A. Murat Eren + Ana Belen Sarabia + Ana Krivokapic + Andi Albrecht + André Ericson + Andrei Kulakov + Andreas + Andreas Mock + Andreas Pelme + Andrés Torres Marroquín + Andrew Brehaut + Andrew Clark + Andrew Durdin + Andrew Godwin + Andrew Pinkham + Andrews Medina + Andrew Northall + Andriy Sokolovskiy + Andy Chosak + Andy Dustman + Andy Gayton + andy@jadedplanet.net + Anssi Kääriäinen + ant9000@netwise.it + Anthony Briggs + Anthony Wright + Anton Samarchyan + Antoni Aloy + Antonio Cavedoni + Antonis Christofides + Antti Haapala + Antti Kaihola + Anubhav Joshi + Anvesh Mishra + Aram Dulyan + arien + Armin Ronacher + Aron Podrigal + Arsalan Ghassemi + Artem Gnilov + Arthur + Arthur Jovart + Arthur Koziel + Arthur Moreira + Arthur Rio + Arvis Bickovskis + Arya Khaligh + Aryeh Leib Taurog + A S Alam + Asif Saif Uddin + atlithorn + Audrey Roy + av0000@mail.ru + Axel Haustant + Aymeric Augustin + Bahadır Kandemir + Baishampayan Ghose + Baptiste Mispelon + Barry Pederson + Bartolome Sanchez Salado + Barton Ip + Bartosz Grabski + Bashar Al-Abdulhadi + Bastian 
Kleineidam + Batiste Bieler + Batman + Batuhan Taskaya + Baurzhan Ismagulov + Ben Dean Kawamura + Ben Firshman + Ben Godfrey + Benjamin Wohlwend + Ben Khoo + Ben Lomax + Ben Slavin + Ben Sturmfels + Berker Peksag + Bernd Schlapsi + Bernhard Essl + berto + Bhuvnesh Sharma + Bill Fenner + Bjørn Stabell + Bo Marchman + Bogdan Mateescu + Bojan Mihelac + Bouke Haarsma + Božidar Benko + Brad Melin + Brandon Chinn + Brant Harris + Brendan Hayward + Brendan Quinn + Brenton Simpson + Brett Cannon + Brett Hoerner + Brian Beck + Brian Fabian Crain + Brian Harring + Brian Helba + Brian Ray + Brian Rosner + Bruce Kroeze + Bruno Alla + Bruno Renié + brut.alll@gmail.com + Bryan Chow + Bryan Veloso + bthomas + btoll@bestweb.net + C8E + Caio Ariede + Calvin Spealman + Cameron Curry + Cameron Knight (ckknight) + Can Burak Çilingir + Can Sarıgöl + Carl Meyer + Carles Pina i Estany + Carlos Eduardo de Paula + Carlos Matías de la Torre + Carlton Gibson + cedric@terramater.net + Chad Whitman + ChaosKCW + Charlie Leifer + charly.wilhelm@gmail.com + Chason Chaffin + Cheng Zhang + Chris Adams + Chris Beaven + Chris Bennett + Chris Cahoon + Chris Chamberlin + Chris Jerdonek + Chris Jones + Chris Lamb + Chris Streeter + Christian Barcenas + Christian Metts + Christian Oudard + Christian Tanzer + Christoffer Sjöbergsson + Christophe Pettus + Christopher Adams + Christopher Babiak + Christopher Lenz + Christoph Mędrela + Chris Wagner + Chris Wesseling + Chris Wilson + Ciaran McCormick + Claude Paroz + Clint Ecker + colin@owlfish.com + Colin Wood + Collin Anderson + Collin Grady + Colton Hicks + Craig Blaszczyk + crankycoder@gmail.com + Curtis Maloney (FunkyBob) + dackze+django@gmail.com + Dagur Páll Ammendrup + Dane Springmeyer + Dan Fairs + Daniel Alves Barbosa de Oliveira Vaz + Daniel Duan + Daniele Procida + Daniel Fairhead + Daniel Greenfeld + dAniel hAhler + Daniel Jilg + Daniel Lindsley + Daniel Poelzleithner + Daniel Pyrathon + Daniel Roseman + Daniel Tao + Daniel Wiesmann + Danilo 
Bargen + Dan Johnson + Dan Palmer + Dan Poirier + Dan Stephenson + Dan Watson + dave@thebarproject.com + David Ascher + David Avsajanishvili + David Blewett + David Brenneman + David Cramer + David Danier + David Eklund + David Foster + David Gouldin + david@kazserve.org + David Krauth + David Larlet + David Reynolds + David Sanders + David Schein + David Tulig + David Winterbottom + David Wobrock + Davide Ceretti + Deep L. Sukhwani + Deepak Thukral + Denis Kuzmichyov + Dennis Schwertel + Derek Willis + Deric Crago + deric@monowerks.com + Deryck Hodge + Dimitris Glezos + Dirk Datzert + Dirk Eschler + Dmitri Fedortchenko + Dmitry Jemerov + dne@mayonnaise.net + Dolan Antenucci + Donald Harvey + Donald Stufft + Don Spaulding + Doug Beck + Doug Napoleone + dready + Durval Carvalho de Souza + dusk@woofle.net + Dustyn Gibson + Ed Morley + Egidijus Macijauskas + eibaan@gmail.com + elky + Emmanuelle Delescolle + Emil Stenström + enlight + Enrico + Eric Boersma + Eric Brandwein + Eric Floehr + Eric Florenzano + Eric Holscher + Eric Moritz + Eric Palakovich Carr + Erik Karulf + Erik Romijn + eriks@win.tue.nl + Erwin Junge + Esdras Beleza + Espen Grindhaug + Étienne Beaulé + Eugene Lazutkin + Evan Grim + Fabian Büchler + Fabrice Aneche + Farhaan Bukhsh + favo@exoweb.net + fdr + Federico Capoano + Felipe Lee + Filip Noetzel + Filip Wasilewski + Finn Gruwier Larsen + Fiza Ashraf + Flávio Juvenal da Silva Junior + flavio.curella@gmail.com + Florian Apolloner + Florian Demmer + Florian Moussous + fnaimi66 + Fran Hrženjak + Francesco Panico + Francisco Albarran Cristobal + Francisco Couzo + François Freitag + Frank Tegtmeyer + Frank Wierzbicki + Frank Wiles + František Malina + Fraser Nevett + Gabriel Grant + Gabriel Hurley + gandalf@owca.info + Garry Lawrence + Garry Polley + Garth Kidd + Gary Wilson + Gasper Koren + Gasper Zejn + Gav O'Connor + Gavin Wahl + Ge Hanbin + geber@datacollect.com + Geert Vanderkelen + George Karpenkov + George Song + George Vilches + Georg "Hugo" 
Bauer + Georgi Stanojevski + Gerardo Orozco + Gil Gonçalves + Girish Kumar + Girish Sontakke + Gisle Aas + Glenn Maynard + glin@seznam.cz + GomoX + Gonzalo Saavedra + Gopal Narayanan + Graham Carlyle + Grant Jenks + Greg Chapple + Greg Twohig + Gregor Allensworth + Gregor Müllegger + Grigory Fateyev + Grzegorz Ślusarek + Guilherme Mesquita Gondim + Guillaume Pannatier + Gustavo Picon + hambaloney + Hang Park + Hannes Ljungberg + Hannes Struß + Harm Geerts + Hasan Ramezani + Hawkeye + Helen Sherwood-Taylor + Henrique Romano + Henry Dang + Hidde Bultsma + Himanshu Chauhan + hipertracker@gmail.com + Hiroki Kiyohara + Honza Král + Horst Gutmann + Hugo Osvaldo Barrera + HyukJin Jang + Hyun Mi Ae + Iacopo Spalletti + Ian A Wilson + Ian Clelland + Ian G. Kelly + Ian Holsman + Ian Lee + Ibon + Idan Gazit + Idan Melamed + Ifedapo Olarewaju + Igor Kolar + Illia Volochii + Ilya Bass + Ilya Semenov + Ingo Klöcker + I.S. van Oostveen + Iuri de Silvio + ivan.chelubeev@gmail.com + Ivan Sagalaev (Maniac) + Jaap Roes + Jack Moffitt + Jacob Burch + Jacob Green + Jacob Kaplan-Moss + Jacob Rief + Jacob Walls + Jakub Paczkowski + Jakub Wilk + Jakub Wiśniowski + james_027@yahoo.com + James Aylett + James Bennett + James Gillard + James Murty + James Tauber + James Timmins + James Turk + James Wheare + Jamie Matthews + Jannis Leidel + Janos Guljas + Jan Pazdziora + Jan Rademaker + Jarek Głowacki + Jarek Zgoda + Jarosław Wygoda + Jason Davies (Esaj) + Jason Huggins + Jason McBrayer + jason.sidabras@gmail.com + Jason Yan + Javier Mansilla + Jay Parlar + Jay Welborn + Jay Wineinger + J. 
Clifford Dyer + jcrasta@gmail.com + jdetaeye + Jeff Anderson + Jeff Balogh + Jeff Hui + Jeffrey Gelens + Jeff Triplett + Jeffrey Yancey + Jens Diemer + Jens Page + Jensen Cochran + Jeong-Min Lee + Jérémie Blaser + Jeremy Bowman + Jeremy Carbaugh + Jeremy Dunck + Jeremy Lainé + Jerin Peter George + Jesse Young + Jezeniel Zapanta + jhenry + Jim Dalton + Jimmy Song + Jiri Barton + Joachim Jablon + Joao Oliveira + Joao Pedro Silva + Joe Heck + Joe Jackson + Joel Bohman + Joel Heenan + Joel Watts + Joe Topjian + Johan C. Stöver + Johann Queuniet + john@calixto.net + John D'Agostino + John D'Ambrosio + John Huddleston + John Moses + John Paulett + John Shaffer + Jökull Sólberg Auðunsson + Jon Dufresne + Jon Janzen + Jonas Haag + Jonas Lundberg + Jonathan Davis + Jonatas C. D. + Jonathan Buchanan + Jonathan Daugherty (cygnus) + Jonathan Feignberg + Jonathan Slenders + Jonny Park + Jordan Bae + Jordan Dimov + Jordi J. Tablada + Jorge Bastida + Jorge Gajon + José Tomás Tocino García + Josef Rousek + Joseph Kocherhans + Josh Smeaton + Joshua Cannon + Joshua Ginsberg + Jozko Skrablin + J. 
Pablo Fernandez + jpellerin@gmail.com + Juan Catalano + Juan Manuel Caicedo + Juan Pedro Fisanotti + Julia Elman + Julia Matsieva + Julian Bez + Julie Rymer + Julien Phalip + Junyoung Choi + junzhang.jn@gmail.com + Jure Cuhalev + Justin Bronn + Justine Tunney + Justin Lilly + Justin Michalicek + Justin Myles Holmes + Jyrki Pulliainen + Kacper Wolkiewicz + Kadesarin Sanjek + Kapil Bansal + Karderio + Karen Tracey + Karol Sikora + Katherine “Kati” Michel + Kathryn Killebrew + Katie Miller + Keith Bussell + Kenneth Love + Kent Hauser + Keryn Knight + Kevin Grinberg + Kevin Kubasik + Kevin McConnell + Kieran Holland + kilian + Kim Joon Hwan 김준환 + Kim Soung Ryoul 김성렬 + Klaas van Schelven + knox + konrad@gwu.edu + Kowito Charoenratchatabhan + Krišjānis Vaiders + krzysiek.pawlik@silvermedia.pl + Krzysztof Jagiello + Krzysztof Jurewicz + Krzysztof Kulewski + kurtiss@meetro.com + Lakin Wecker + Lars Yencken + Lau Bech Lauritzen + Laurent Luce + Laurent Rahuel + lcordier@point45.com + Leah Culver + Leandra Finger + Lee Reilly + Lee Sanghyuck + Leo "hylje" Honkanen + Leo Shklovskii + Leo Soto + lerouxb@gmail.com + Lex Berezhny + Liang Feng + Lily Foote + limodou + Lincoln Smith + Liu Yijie <007gzs@gmail.com> + Loek van Gent + Loïc Bistuer + Lowe Thiderman + Luan Pablo + Lucas Connors + Luciano Ramalho + Lucidiot + Ludvig Ericson + Luis C. 
Berrocal + Łukasz Langa + Łukasz Rekucki + Luke Granger-Brown + Luke Plant + Maciej Fijalkowski + Maciej Wiśniowski + Mads Jensen + Makoto Tsuyuki + Malcolm Tredinnick + Manav Agarwal + Manuel Saelices + Manuzhai + Marc Aymerich Gubern + Marc Egli + Marcel Telka + Marcelo Galigniana + Marc Fargas + Marc Garcia + Marcin Wróbel + Marc Remolt + Marc Seguí Coll + Marc Tamlyn + Marc-Aurèle Brothier + Marian Andre + Marijn Vriens + Mario Gonzalez + Mariusz Felisiak + Mark Biggers + Mark Evans + Mark Gensler + mark@junklight.com + Mark Lavin + Mark Sandstrom + Markus Amalthea Magnuson + Markus Holtermann + Marten Kenbeek + Marti Raudsepp + martin.glueck@gmail.com + Martin Green + Martin Kosír + Martin Mahner + Martin Maney + Martin von Gagern + Mart Sõmermaa + Marty Alchin + Masashi Shibata + masonsimon+django@gmail.com + Massimiliano Ravelli + Massimo Scamarcia + Mathieu Agopian + Matías Bordese + Matt Boersma + Matt Brewer + Matt Croydon + Matt Deacalion Stevens + Matt Dennenbaum + Matthew Flanagan + Matthew Schinckel + Matthew Somerville + Matthew Tretter + Matthew Wilkes + Matthias Kestenholz + Matthias Pronk + Matt Hoskins + Matt McClanahan + Matt Riggott + Matt Robenolt + Mattia Larentis + Mattia Procopio + Mattias Loverot + mattycakes@gmail.com + Max Burstein + Max Derkachev + Max Smolens + Maxime Lorant + Maxime Turcotte + Maximilian Merz + Maximillian Dornseif + mccutchen@gmail.com + Meghana Bhange + Meir Kriheli + Michael S. 
Brown + Michael Hall + Michael Josephson + Michael Lissner + Michael Manfre + michael.mcewan@gmail.com + Michael Placentra II + Michael Radziej + Michael Sanders + Michael Schwarz + Michael Sinov + Michael Thornhill + Michal Chruszcz + michal@plovarna.cz + Michał Modzelewski + Mihai Damian + Mihai Preda + Mikaël Barbero + Mike Axiak + Mike Grouchy + Mike Malone + Mike Richardson + Mike Wiacek + Mikhail Korobov + Mikko Hellsing + Mikołaj Siedlarek + milkomeda + Milton Waddams + mitakummaa@gmail.com + mmarshall + Moayad Mardini + Morgan Aubert + Moritz Sichert + Morten Bagai + msaelices + msundstr + Mushtaq Ali + Mykola Zamkovoi + Nadège Michel + Nagy Károly + Nasimul Haque + Nasir Hussain + Natalia Bidart + Nate Bragg + Nathan Gaberel + Neal Norwitz + Nebojša Dorđević + Ned Batchelder + Nena Kojadin + Niall Dalton + Niall Kelly + Nick Efford + Nick Lane + Nick Pope + Nick Presta + Nick Sandford + Nick Sarbicki + Niclas Olofsson + Nicola Larosa + Nicolas Lara + Nicolas Noé + Nikita Marchant + Nikita Sobolev + Niran Babalola + Nis Jørgensen + Nowell Strite + Nuno Mariz + Octavio Peri + oggie rob + oggy + Oliver Beattie + Oliver Rutherfurd + Olivier Le Thanh Duong + Olivier Sels + Olivier Tabone + Orestis Markou + Orne Brocaar + Oscar Ramirez + Ossama M. 
Khayat + Owen Griffiths + Ömer Faruk Abacı + Pablo Martín + Panos Laganakos + Paolo Melchiorre + Pascal Hartig + Pascal Varet + Patrik Sletmo + Paul Bissex + Paul Collier + Paul Collins + Paul Donohue + Paul Lanier + Paul McLanahan + Paul McMillan + Paulo Poiati + Paulo Scardine + Paul Smith + Pavel Kulikov + pavithran s + Pavlo Kapyshin + permonik@mesias.brnonet.cz + Petar Marić + Pete Crosier + peter@mymart.com + Peter Sheats + Peter van Kampen + Peter Zsoldos + Pete Shinners + Petr Marhoun + Petter Strandmark + pgross@thoughtworks.com + phaedo + phil.h.smith@gmail.com + Philip Lindborg + Philippe Raoult + phil@produxion.net + Piotr Jakimiak + Piotr Lewandowski + plisk + polpak@yahoo.com + pradeep.gowda@gmail.com + Preston Holmes + Preston Timmons + Priyansh Saxena + Przemysław Buczkowski + Przemysław Suliga + Qi Zhao + Rachel Tobin + Rachel Willmer + Radek Švarz + Rafael Giebisch + Raffaele Salmaso + Rahmat Faisal + Rajesh Dhawan + Ramez Ashraf + Ramil Yanbulatov + Ramin Farajpour Cami + Ramiro Morales + Ramon Saraiva + Ram Rachum + Randy Barlow + Raphaël Barrois + Raphael Michel + Raúl Cumplido + Rebecca Smith + Remco Wendt + Renaud Parent + Renbi Yu + Reza Mohammadi + rhettg@gmail.com + Ricardo Javier Cárdenes Medina + ricardojbarrios@gmail.com + Riccardo Di Virgilio + Riccardo Magliocchetti + Richard Davies + Richard House + Rick Wagner + Rigel Di Scala + Robert Coup + Robert Myers + Roberto Aguilar + Robert Rock Howard + Robert Wittams + Rob Golding-Day + Rob Hudson + Rob Nguyen + Robin Munn + Rodrigo Pinheiro Marques de Araújo + Rohith P R + Romain Garrigues + Ronnie van den Crommenacker + Ronny Haryanto + Ross Poulton + Roxane Bellot + Rozza + Rudolph Froger + Rudy Mutter + Rune Rønde Laursen + Russell Cloran + Russell Keith-Magee + Russ Webber + Ryan Hall + Ryan Heard + ryankanno + Ryan Kelly + Ryan Niemeyer + Ryan Petrello + Ryan Rubin + Ryno Mathee + Sachin Jat + Sage M. 
Abdullah + Sam Newman + Sander Dijkhuis + Sanket Saurav + Sanyam Khurana + Sarah Boyce + Sarthak Mehrish + schwank@gmail.com + Scot Hacker + Scott Barr + Scott Cranfill + Scott Fitsimones + Scott Pashley + scott@staplefish.com + Sean Brant + Sebastian Hillig + Sebastian Spiegel + Segyo Myung + Selwin Ong + Sengtha Chay + Senko Rašić + serbaut@gmail.com + Sergei Maertens + Sergey Fedoseev + Sergey Kolosov + Seth Hill + Shai Berger + Shannon -jj Behrens + Shawn Milochik + Shreya Bamne + Silvan Spross + Simeon Visser + Simon Blanchard + Simon Charette + Simon Greenhill + Simon Litchfield + Simon Meers + Simon Williams + Simon Willison + Sjoerd Job Postmus + Slawek Mikula + sloonz + smurf@smurf.noris.de + sopel + Sreehari K V + Sridhar Marella + Srinivas Reddy Thatiparthy + Stanislas Guerra + Stanislaus Madueke + Stanislav Karpov + starrynight + Stefan R. Filipek + Stefane Fermgier + Stefano Rivera + Stéphane Raimbault + Stephan Jaekel + Stephen Burrows + Steven L. Smith (fvox13) + Steven Noorbergen (Xaroth) + Stuart Langridge + Subhav Gautam + Sujay S Kumar + Sune Kirkeby + Sung-Jin Hong + SuperJared + Susan Tan + Sutrisno Efendi + Swaroop C H + Szilveszter Farkas + Taavi Teska + Tai Lee + Takashi Matsuo + Tareque Hossain + Taylor Mitchell + Terry Huang + thebjorn + Thejaswi Puthraya + Thijs van Dien + Thom Wiggers + Thomas Chaumeny + Thomas Güttler + Thomas Kerpe + Thomas Sorrel + Thomas Steinacher + Thomas Stromberg + Thomas Tanner + tibimicu@gmx.net + Ties Jan Hefting + Tim Allen + Tim Givois + Tim Graham + Tim Heap + Tim McCurrach + Tim Saylor + Tobias Kunze + Tobias McNulty + tobias@neuyork.de + Todd O'Bryan + Tom Carrick + Tom Christie + Tom Forbes + Tom Insam + Tom Tobin + Tom Wojcik + Tomáš Ehrlich + Tomáš Kopeček + Tome Cvitan + Tomek Paczkowski + Tomer Chachamu + Tommy Beadle + Tore Lundqvist + torne-django@wolfpuppy.org.uk + Travis Cline + Travis Pinney + Travis Swicegood + Travis Terry + Trevor Caira + Trey Long + tstromberg@google.com + tt@gurgle.no + 
Tyler Tarabula + Tyson Clugg + Tyson Tate + Unai Zalakain + Valentina Mukhamedzhanova + valtron + Vasiliy Stavenko + Vasil Vangelovski + Vibhu Agarwal + Victor Andrée + viestards.lists@gmail.com + Viktor Danyliuk + Viktor Grabov + Ville Säävuori + Vinay Karanam + Vinay Sajip + Vincent Foley + Vinny Do + Vitaly Babiy + Vitaliy Yelnik + Vladimir Kuzma + Vlado + Vsevolod Solovyov + Vytis Banaitis + wam-djangobug@wamber.net + Wang Chun + Warren Smith + Waylan Limberg + Wiktor Kołodziej + Wiley Kestner + Wiliam Alves de Souza + Will Ayd + William Schwartz + Will Hardy + Wilson Miner + Wim Glenn + wojtek + Wu Haotian + Xavier Francisco + Xia Kai + Yann Fouillat + Yann Malet + Yash Jhunjhunwala + Yasushi Masuda + ye7cakf02@sneakemail.com + ymasuda@ethercube.com + Yoong Kang Lim + Yusuke Miyazaki + yyyyyyyan + Zac Hatfield-Dodds + Zachary Voase + Zach Liu + Zach Thompson + Zain Memon + Zain Patel + Zak Johnson + Žan Anderle + Zbigniew Siciarz + zegor + Zeynel Özdemir + Zlatko Mašek + zriv + + +A big THANK YOU goes to: + + Rob Curley and Ralph Gage for letting us open-source Django. + + Frank Wiles for making excellent arguments for open-sourcing, and for + his sage sysadmin advice. + + Ian Bicking for convincing Adrian to ditch code generation. + + Mark Pilgrim for "Dive Into Python" (https://www.diveinto.org/python3/). + + Guido van Rossum for creating Python. diff --git a/testbed/django__django/CONTRIBUTING.rst b/testbed/django__django/CONTRIBUTING.rst new file mode 100644 index 0000000000000000000000000000000000000000..4b2ab363660df5b4b43e8e263b70ddb96b1db54e --- /dev/null +++ b/testbed/django__django/CONTRIBUTING.rst @@ -0,0 +1,32 @@ +====================== +Contributing to Django +====================== + +As an open source project, Django welcomes contributions of many forms. 
+ +Examples of contributions include: + +* Code patches +* Documentation improvements +* Bug reports and patch reviews + +Extensive contribution guidelines are available in the repository at +``docs/internals/contributing/``, or online at: + +https://docs.djangoproject.com/en/dev/internals/contributing/ + +**Warning: non-trivial pull requests (anything more than fixing a typo) without +Trac tickets will be closed!** `Please file a ticket`__ to suggest changes. + +__ https://code.djangoproject.com/newticket + +Django uses Trac to keep track of bugs, feature requests, and associated +patches because GitHub doesn't provide adequate tooling for its community. +Patches can be submitted as pull requests, but if you don't file a ticket, +it's unlikely that we'll notice your contribution. + +Code of Conduct +=============== + +As a contributor, you can help us keep the Django community open and inclusive. +Please read and follow our `Code of Conduct `_. diff --git a/testbed/django__django/Gruntfile.js b/testbed/django__django/Gruntfile.js new file mode 100644 index 0000000000000000000000000000000000000000..2d99041cb87c8bc8378811dd814358c4638c1826 --- /dev/null +++ b/testbed/django__django/Gruntfile.js @@ -0,0 +1,15 @@ +'use strict'; + +const globalThreshold = 50; // Global code coverage threshold (as a percentage) + +module.exports = function(grunt) { + grunt.initConfig({ + qunit: { + all: ['js_tests/tests.html'] + } + }); + + grunt.loadNpmTasks('grunt-contrib-qunit'); + grunt.registerTask('test', ['qunit']); + grunt.registerTask('default', ['test']); +}; diff --git a/testbed/django__django/INSTALL b/testbed/django__django/INSTALL new file mode 100644 index 0000000000000000000000000000000000000000..247b0bcdae7bcf0085a2c95e1e03d822c9717103 --- /dev/null +++ b/testbed/django__django/INSTALL @@ -0,0 +1,8 @@ +Thanks for downloading Django. + +To install it, make sure you have Python 3.10 or greater installed. 
Then run +this command from the command prompt: + + python -m pip install . + +For more detailed instructions, see docs/intro/install.txt. diff --git a/testbed/django__django/LICENSE b/testbed/django__django/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5f4f225dd282aa7e4361ec3c2750bbbaaed8ab1f --- /dev/null +++ b/testbed/django__django/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/testbed/django__django/LICENSE.python b/testbed/django__django/LICENSE.python new file mode 100644 index 0000000000000000000000000000000000000000..a25ce5cf7b97b76a64a64e4af0ca43cb7f061aff --- /dev/null +++ b/testbed/django__django/LICENSE.python @@ -0,0 +1,290 @@ +Django is licensed under the three-clause BSD license; see the file +LICENSE for details. + +Django includes code from the Python standard library, which is licensed under +the Python license, a permissive open source license. The copyright and license +is included below for compliance with Python's terms. + +---------------------------------------------------------------------- + +Copyright (c) 2001-present Python Software Foundation; All Rights Reserved + +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. 
+ + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. 
This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. 
This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. 
Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. 
+Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. 
+ +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/testbed/django__django/MANIFEST.in b/testbed/django__django/MANIFEST.in new file mode 100644 index 0000000000000000000000000000000000000000..3eacc1860448bfc9d68779090b47bca9c6d0878e --- /dev/null +++ b/testbed/django__django/MANIFEST.in @@ -0,0 +1,17 @@ +include AUTHORS +include Gruntfile.js +include INSTALL +include LICENSE +include LICENSE.python +include MANIFEST.in +include package.json +include tox.ini +include *.rst +graft django +graft docs +graft extras +graft js_tests +graft scripts +graft tests +global-exclude __pycache__ +global-exclude *.py[co] diff --git a/testbed/django__django/README.rst b/testbed/django__django/README.rst new file mode 100644 index 0000000000000000000000000000000000000000..e0baa8a1f7225a587daeeec32d6201866ef5ef10 --- /dev/null +++ b/testbed/django__django/README.rst @@ -0,0 +1,58 @@ +====== +Django +====== + +Django is a high-level Python web framework that encourages rapid development +and clean, pragmatic design. Thanks for checking it out. + +All documentation is in the "``docs``" directory and online at +https://docs.djangoproject.com/en/stable/. If you're just getting started, +here's how we recommend you read the docs: + +* First, read ``docs/intro/install.txt`` for instructions on installing Django. + +* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``, + ``docs/intro/tutorial02.txt``, etc.). + +* If you want to set up an actual deployment server, read + ``docs/howto/deployment/index.txt`` for instructions. + +* You'll probably want to read through the topical guides (in ``docs/topics``) + next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific + problems, and check out the reference (``docs/ref``) for gory details. + +* See ``docs/README`` for instructions on building an HTML version of the docs. + +Docs are updated rigorously. 
If you find any problems in the docs, or think +they should be clarified in any way, please take 30 seconds to fill out a +ticket here: https://code.djangoproject.com/newticket + +To get more help: + +* Join the ``#django`` channel on ``irc.libera.chat``. Lots of helpful people + hang out there. `Webchat is available <https://web.libera.chat/#django>`_. + +* Join the django-users mailing list, or read the archives, at + https://groups.google.com/group/django-users. + +* Join the `Django Discord community <https://chat.djangoproject.com>`_. + +* Join the community on the `Django Forum <https://forum.djangoproject.com/>`_. + +To contribute to Django: + +* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for + information about getting involved. + +To run Django's test suite: + +* Follow the instructions in the "Unit tests" section of + ``docs/internals/contributing/writing-code/unit-tests.txt``, published online at + https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests + +Supporting the Development of Django +==================================== + +Django's development depends on your contributions. 
+ +If you depend on Django, remember to support the Django Software Foundation: https://www.djangoproject.com/fundraising/ diff --git a/testbed/django__django/package.json b/testbed/django__django/package.json new file mode 100644 index 0000000000000000000000000000000000000000..0ae77c3dc239903d296967c77288adf0f9c00df3 --- /dev/null +++ b/testbed/django__django/package.json @@ -0,0 +1,19 @@ +{ + "name": "Django", + "private": true, + "scripts": { + "pretest": "eslint .", + "test": "grunt test --verbose" + }, + "engines": { + "npm": ">=1.3.0" + }, + "devDependencies": { + "eslint": "^8.44.0", + "puppeteer": "^19.11.1", + "grunt": "^1.6.1", + "grunt-cli": "^1.4.3", + "grunt-contrib-qunit": "^7.0.0", + "qunit": "^2.19.4" + } +} diff --git a/testbed/django__django/pyproject.toml b/testbed/django__django/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..2c776030cea2dc20b8e5b2332b554977c465d1c7 --- /dev/null +++ b/testbed/django__django/pyproject.toml @@ -0,0 +1,7 @@ +[build-system] +requires = ['setuptools>=40.8.0'] +build-backend = 'setuptools.build_meta' + +[tool.black] +target-version = ['py310'] +force-exclude = 'tests/test_runner_apps/tagged/tests_syntax_error.py' diff --git a/testbed/django__django/setup.cfg b/testbed/django__django/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..8848a1c1156204b19e410e5dc93bc1f8cd671145 --- /dev/null +++ b/testbed/django__django/setup.cfg @@ -0,0 +1,69 @@ +[metadata] +name = Django +version = attr: django.__version__ +url = https://www.djangoproject.com/ +author = Django Software Foundation +author_email = foundation@djangoproject.com +description = A high-level Python web framework that encourages rapid development and clean, pragmatic design. 
+long_description = file: README.rst +license = BSD-3-Clause +classifiers = + Development Status :: 2 - Pre-Alpha + Environment :: Web Environment + Framework :: Django + Intended Audience :: Developers + License :: OSI Approved :: BSD License + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3 + Programming Language :: Python :: 3 :: Only + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Topic :: Internet :: WWW/HTTP + Topic :: Internet :: WWW/HTTP :: Dynamic Content + Topic :: Internet :: WWW/HTTP :: WSGI + Topic :: Software Development :: Libraries :: Application Frameworks + Topic :: Software Development :: Libraries :: Python Modules +project_urls = + Documentation = https://docs.djangoproject.com/ + Release notes = https://docs.djangoproject.com/en/stable/releases/ + Funding = https://www.djangoproject.com/fundraising/ + Source = https://github.com/django/django + Tracker = https://code.djangoproject.com/ + +[options] +python_requires = >=3.10 +packages = find: +include_package_data = true +zip_safe = false +install_requires = + asgiref >= 3.7.0 + sqlparse >= 0.3.1 + tzdata; sys_platform == 'win32' + +[options.entry_points] +console_scripts = + django-admin = django.core.management:execute_from_command_line + +[options.extras_require] +argon2 = argon2-cffi >= 19.1.0 +bcrypt = bcrypt + +[bdist_rpm] +doc_files = docs extras AUTHORS INSTALL LICENSE README.rst +install_script = scripts/rpm-install.sh + +[flake8] +exclude = build,.git,.tox,./tests/.env +extend-ignore = E203 +max-line-length = 88 +per-file-ignores = + django/core/cache/backends/filebased.py:W601 + django/core/cache/backends/base.py:W601 + django/core/cache/backends/redis.py:W601 + tests/cache/tests.py:W601 + +[isort] +profile = black +default_section = THIRDPARTY +known_first_party = django diff --git a/testbed/django__django/setup.py b/testbed/django__django/setup.py new file mode 100644 index 
0000000000000000000000000000000000000000..ef91130d47389904a65747be6c89349ff9e62ef7 --- /dev/null +++ b/testbed/django__django/setup.py @@ -0,0 +1,55 @@ +import os +import site +import sys +from distutils.sysconfig import get_python_lib + +from setuptools import setup + +# Allow editable install into user site directory. +# See https://github.com/pypa/pip/issues/7953. +site.ENABLE_USER_SITE = "--user" in sys.argv[1:] + +# Warn if we are installing over top of an existing installation. This can +# cause issues where files that were deleted from a more recent Django are +# still present in site-packages. See #18115. +overlay_warning = False +if "install" in sys.argv: + lib_paths = [get_python_lib()] + if lib_paths[0].startswith("/usr/lib/"): + # We have to try also with an explicit prefix of /usr/local in order to + # catch Debian's custom user site-packages directory. + lib_paths.append(get_python_lib(prefix="/usr/local")) + for lib_path in lib_paths: + existing_path = os.path.abspath(os.path.join(lib_path, "django")) + if os.path.exists(existing_path): + # We note the need for the warning here, but present it after the + # command is run, so it's more likely to be seen. + overlay_warning = True + break + + +setup() + + +if overlay_warning: + sys.stderr.write( + """ + +======== +WARNING! +======== + +You have just installed Django over top of an existing +installation, without removing it first. Because of this, +your install may now include extraneous files from a +previous version that have since been removed from +Django. This is known to cause a variety of problems. You +should manually remove the + +%(existing_path)s + +directory and re-install Django. 
+ +""" + % {"existing_path": existing_path} + ) diff --git a/testbed/django__django/tests/.coveragerc b/testbed/django__django/tests/.coveragerc new file mode 100644 index 0000000000000000000000000000000000000000..822e6846a9182579d1453c7bb467e925cd4d1a0a --- /dev/null +++ b/testbed/django__django/tests/.coveragerc @@ -0,0 +1,16 @@ +[run] +branch = True +concurrency = multiprocessing,thread +data_file = .coverages/.coverage +omit = + */django/utils/autoreload.py +source = django + +[report] +ignore_errors = True +omit = + */django/conf/locale/* + */tests/* + +[html] +directory = coverage_html diff --git a/testbed/django__django/tests/README.rst b/testbed/django__django/tests/README.rst new file mode 100644 index 0000000000000000000000000000000000000000..86bd6987cf74ee39fadc3744d21d90034e4b4799 --- /dev/null +++ b/testbed/django__django/tests/README.rst @@ -0,0 +1,10 @@ +To run the test suite, first, create and activate a virtual environment. Then +install some requirements and run the tests:: + + $ cd tests + $ python -m pip install -e .. + $ python -m pip install -r requirements/py3.txt + $ ./runtests.py + +For more information about the test suite, see +https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/. 
diff --git a/testbed/django__django/tests/model_fields/test_field_flags.py b/testbed/django__django/tests/model_fields/test_field_flags.py new file mode 100644 index 0000000000000000000000000000000000000000..33f3334567f92443fcf19809df9db8020498ac24 --- /dev/null +++ b/testbed/django__django/tests/model_fields/test_field_flags.py @@ -0,0 +1,221 @@ +from django import test +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation +from django.db import models + +from .models import AllFieldsModel + +NON_CONCRETE_FIELDS = ( + models.ForeignObject, + GenericForeignKey, + GenericRelation, +) + +NON_EDITABLE_FIELDS = ( + models.BinaryField, + GenericForeignKey, + GenericRelation, +) + +RELATION_FIELDS = ( + models.ForeignKey, + models.ForeignObject, + models.ManyToManyField, + models.OneToOneField, + GenericForeignKey, + GenericRelation, +) + +MANY_TO_MANY_CLASSES = { + models.ManyToManyField, +} + +MANY_TO_ONE_CLASSES = { + models.ForeignObject, + models.ForeignKey, + GenericForeignKey, +} + +ONE_TO_MANY_CLASSES = { + models.ForeignObjectRel, + models.ManyToOneRel, + GenericRelation, +} + +ONE_TO_ONE_CLASSES = { + models.OneToOneField, +} + +FLAG_PROPERTIES = ( + "concrete", + "editable", + "is_relation", + "model", + "hidden", + "one_to_many", + "many_to_one", + "many_to_many", + "one_to_one", + "related_model", +) + +FLAG_PROPERTIES_FOR_RELATIONS = ( + "one_to_many", + "many_to_one", + "many_to_many", + "one_to_one", +) + + +class FieldFlagsTests(test.SimpleTestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.fields = [ + *AllFieldsModel._meta.fields, + *AllFieldsModel._meta.private_fields, + ] + + cls.all_fields = [ + *cls.fields, + *AllFieldsModel._meta.many_to_many, + *AllFieldsModel._meta.private_fields, + ] + + cls.fields_and_reverse_objects = [ + *cls.all_fields, + *AllFieldsModel._meta.related_objects, + ] + + def test_each_field_should_have_a_concrete_attribute(self): + self.assertTrue(all(f.concrete.__class__ 
== bool for f in self.fields)) + + def test_each_field_should_have_an_editable_attribute(self): + self.assertTrue(all(f.editable.__class__ == bool for f in self.all_fields)) + + def test_each_field_should_have_a_has_rel_attribute(self): + self.assertTrue(all(f.is_relation.__class__ == bool for f in self.all_fields)) + + def test_each_object_should_have_auto_created(self): + self.assertTrue( + all( + f.auto_created.__class__ == bool + for f in self.fields_and_reverse_objects + ) + ) + + def test_non_concrete_fields(self): + for field in self.fields: + if type(field) in NON_CONCRETE_FIELDS: + self.assertFalse(field.concrete) + else: + self.assertTrue(field.concrete) + + def test_non_editable_fields(self): + for field in self.all_fields: + if type(field) in NON_EDITABLE_FIELDS: + self.assertFalse(field.editable) + else: + self.assertTrue(field.editable) + + def test_related_fields(self): + for field in self.all_fields: + if type(field) in RELATION_FIELDS: + self.assertTrue(field.is_relation) + else: + self.assertFalse(field.is_relation) + + def test_field_names_should_always_be_available(self): + for field in self.fields_and_reverse_objects: + self.assertTrue(field.name) + + def test_all_field_types_should_have_flags(self): + for field in self.fields_and_reverse_objects: + for flag in FLAG_PROPERTIES: + self.assertTrue( + hasattr(field, flag), + "Field %s does not have flag %s" % (field, flag), + ) + if field.is_relation: + true_cardinality_flags = sum( + getattr(field, flag) is True + for flag in FLAG_PROPERTIES_FOR_RELATIONS + ) + # If the field has a relation, there should be only one of the + # 4 cardinality flags available. 
+ self.assertEqual(1, true_cardinality_flags) + + def test_cardinality_m2m(self): + m2m_type_fields = [ + f for f in self.all_fields if f.is_relation and f.many_to_many + ] + # Test classes are what we expect + self.assertEqual(MANY_TO_MANY_CLASSES, {f.__class__ for f in m2m_type_fields}) + + # Ensure all m2m reverses are m2m + for field in m2m_type_fields: + reverse_field = field.remote_field + self.assertTrue(reverse_field.is_relation) + self.assertTrue(reverse_field.many_to_many) + self.assertTrue(reverse_field.related_model) + + def test_cardinality_o2m(self): + o2m_type_fields = [ + f + for f in self.fields_and_reverse_objects + if f.is_relation and f.one_to_many + ] + # Test classes are what we expect + self.assertEqual(ONE_TO_MANY_CLASSES, {f.__class__ for f in o2m_type_fields}) + + # Ensure all o2m reverses are m2o + for field in o2m_type_fields: + if field.concrete: + reverse_field = field.remote_field + self.assertTrue(reverse_field.is_relation and reverse_field.many_to_one) + + def test_cardinality_m2o(self): + m2o_type_fields = [ + f + for f in self.fields_and_reverse_objects + if f.is_relation and f.many_to_one + ] + # Test classes are what we expect + self.assertEqual(MANY_TO_ONE_CLASSES, {f.__class__ for f in m2o_type_fields}) + + # Ensure all m2o reverses are o2m + for obj in m2o_type_fields: + if hasattr(obj, "field"): + reverse_field = obj.field + self.assertTrue(reverse_field.is_relation and reverse_field.one_to_many) + + def test_cardinality_o2o(self): + o2o_type_fields = [f for f in self.all_fields if f.is_relation and f.one_to_one] + # Test classes are what we expect + self.assertEqual(ONE_TO_ONE_CLASSES, {f.__class__ for f in o2o_type_fields}) + + # Ensure all o2o reverses are o2o + for obj in o2o_type_fields: + if hasattr(obj, "field"): + reverse_field = obj.field + self.assertTrue(reverse_field.is_relation and reverse_field.one_to_one) + + def test_hidden_flag(self): + incl_hidden = set(AllFieldsModel._meta.get_fields(include_hidden=True)) 
+ no_hidden = set(AllFieldsModel._meta.get_fields()) + fields_that_should_be_hidden = incl_hidden - no_hidden + for f in incl_hidden: + self.assertEqual(f in fields_that_should_be_hidden, f.hidden) + + def test_model_and_reverse_model_should_equal_on_relations(self): + for field in AllFieldsModel._meta.get_fields(): + is_concrete_forward_field = field.concrete and field.related_model + if is_concrete_forward_field: + reverse_field = field.remote_field + self.assertEqual(field.model, reverse_field.related_model) + self.assertEqual(field.related_model, reverse_field.model) + + def test_null(self): + # null isn't well defined for a ManyToManyField, but changing it to + # True causes backwards compatibility problems (#25320). + self.assertFalse(AllFieldsModel._meta.get_field("m2m").null) + self.assertTrue(AllFieldsModel._meta.get_field("reverse2").null) diff --git a/testbed/django__django/tests/model_fields/test_filepathfield.py b/testbed/django__django/tests/model_fields/test_filepathfield.py new file mode 100644 index 0000000000000000000000000000000000000000..362d12d0db4c74d3e49452d5338e580de2f79e2f --- /dev/null +++ b/testbed/django__django/tests/model_fields/test_filepathfield.py @@ -0,0 +1,22 @@ +import os + +from django.db.models import FilePathField +from django.test import SimpleTestCase + + +class FilePathFieldTests(SimpleTestCase): + def test_path(self): + path = os.path.dirname(__file__) + field = FilePathField(path=path) + self.assertEqual(field.path, path) + self.assertEqual(field.formfield().path, path) + + def test_callable_path(self): + path = os.path.dirname(__file__) + + def generate_path(): + return path + + field = FilePathField(path=generate_path) + self.assertEqual(field.path(), path) + self.assertEqual(field.formfield().path, path) diff --git a/testbed/django__django/tests/model_fields/test_foreignkey.py b/testbed/django__django/tests/model_fields/test_foreignkey.py new file mode 100644 index 
0000000000000000000000000000000000000000..ca8eff354078d011043b0b5489604f8f11353116 --- /dev/null +++ b/testbed/django__django/tests/model_fields/test_foreignkey.py @@ -0,0 +1,169 @@ +from decimal import Decimal + +from django.apps import apps +from django.core import checks +from django.core.exceptions import FieldError +from django.db import models +from django.test import TestCase, skipIfDBFeature +from django.test.utils import isolate_apps + +from .models import Bar, FkToChar, Foo, PrimaryKeyCharModel + + +class ForeignKeyTests(TestCase): + def test_callable_default(self): + """A lazy callable may be used for ForeignKey.default.""" + a = Foo.objects.create(id=1, a="abc", d=Decimal("12.34")) + b = Bar.objects.create(b="bcd") + self.assertEqual(b.a, a) + + @skipIfDBFeature("interprets_empty_strings_as_nulls") + def test_empty_string_fk(self): + """ + Empty strings foreign key values don't get converted to None (#19299). + """ + char_model_empty = PrimaryKeyCharModel.objects.create(string="") + fk_model_empty = FkToChar.objects.create(out=char_model_empty) + fk_model_empty = FkToChar.objects.select_related("out").get( + id=fk_model_empty.pk + ) + self.assertEqual(fk_model_empty.out, char_model_empty) + + @isolate_apps("model_fields") + def test_warning_when_unique_true_on_fk(self): + class Foo(models.Model): + pass + + class FKUniqueTrue(models.Model): + fk_field = models.ForeignKey(Foo, models.CASCADE, unique=True) + + model = FKUniqueTrue() + expected_warnings = [ + checks.Warning( + "Setting unique=True on a ForeignKey has the same effect as using a " + "OneToOneField.", + hint=( + "ForeignKey(unique=True) is usually better served by a " + "OneToOneField." 
+ ), + obj=FKUniqueTrue.fk_field.field, + id="fields.W342", + ) + ] + warnings = model.check() + self.assertEqual(warnings, expected_warnings) + + def test_related_name_converted_to_text(self): + rel_name = Bar._meta.get_field("a").remote_field.related_name + self.assertIsInstance(rel_name, str) + + def test_abstract_model_pending_operations(self): + """ + Foreign key fields declared on abstract models should not add lazy + relations to resolve relationship declared as string (#24215). + """ + pending_ops_before = list(apps._pending_operations.items()) + + class AbstractForeignKeyModel(models.Model): + fk = models.ForeignKey("missing.FK", models.CASCADE) + + class Meta: + abstract = True + + self.assertIs(AbstractForeignKeyModel._meta.apps, apps) + self.assertEqual( + pending_ops_before, + list(apps._pending_operations.items()), + "Pending lookup added for a foreign key on an abstract model", + ) + + @isolate_apps("model_fields", "model_fields.tests") + def test_abstract_model_app_relative_foreign_key(self): + class AbstractReferent(models.Model): + reference = models.ForeignKey("Referred", on_delete=models.CASCADE) + + class Meta: + app_label = "model_fields" + abstract = True + + def assert_app_model_resolved(label): + class Referred(models.Model): + class Meta: + app_label = label + + class ConcreteReferent(AbstractReferent): + class Meta: + app_label = label + + self.assertEqual( + ConcreteReferent._meta.get_field("reference").related_model, Referred + ) + + assert_app_model_resolved("model_fields") + assert_app_model_resolved("tests") + + @isolate_apps("model_fields") + def test_to_python(self): + class Foo(models.Model): + pass + + class Bar(models.Model): + fk = models.ForeignKey(Foo, models.CASCADE) + + self.assertEqual(Bar._meta.get_field("fk").to_python("1"), 1) + + @isolate_apps("model_fields") + def test_fk_to_fk_get_col_output_field(self): + class Foo(models.Model): + pass + + class Bar(models.Model): + foo = models.ForeignKey(Foo, models.CASCADE, 
primary_key=True) + + class Baz(models.Model): + bar = models.ForeignKey(Bar, models.CASCADE, primary_key=True) + + col = Baz._meta.get_field("bar").get_col("alias") + self.assertIs(col.output_field, Foo._meta.pk) + + @isolate_apps("model_fields") + def test_recursive_fks_get_col(self): + class Foo(models.Model): + bar = models.ForeignKey("Bar", models.CASCADE, primary_key=True) + + class Bar(models.Model): + foo = models.ForeignKey(Foo, models.CASCADE, primary_key=True) + + with self.assertRaisesMessage(ValueError, "Cannot resolve output_field"): + Foo._meta.get_field("bar").get_col("alias") + + @isolate_apps("model_fields") + def test_non_local_to_field(self): + class Parent(models.Model): + key = models.IntegerField(unique=True) + + class Child(Parent): + pass + + class Related(models.Model): + child = models.ForeignKey(Child, on_delete=models.CASCADE, to_field="key") + + msg = ( + "'model_fields.Related.child' refers to field 'key' which is not " + "local to model 'model_fields.Child'." + ) + with self.assertRaisesMessage(FieldError, msg): + Related._meta.get_field("child").related_fields + + def test_invalid_to_parameter(self): + msg = ( + "ForeignKey(1) is invalid. 
First parameter to ForeignKey must be " + "either a model, a model name, or the string 'self'" + ) + with self.assertRaisesMessage(TypeError, msg): + + class MyModel(models.Model): + child = models.ForeignKey(1, models.CASCADE) + + def test_manager_class_getitem(self): + self.assertIs(models.ForeignKey["Foo"], models.ForeignKey) diff --git a/testbed/django__django/tests/model_fields/test_jsonfield.py b/testbed/django__django/tests/model_fields/test_jsonfield.py new file mode 100644 index 0000000000000000000000000000000000000000..4a1cc075b4c42f83cc75d15cd3bcef8104814542 --- /dev/null +++ b/testbed/django__django/tests/model_fields/test_jsonfield.py @@ -0,0 +1,1144 @@ +import operator +import uuid +from unittest import mock + +from django import forms +from django.core import serializers +from django.core.exceptions import ValidationError +from django.core.serializers.json import DjangoJSONEncoder +from django.db import ( + DataError, + IntegrityError, + NotSupportedError, + OperationalError, + connection, + models, +) +from django.db.models import ( + Count, + ExpressionWrapper, + F, + IntegerField, + JSONField, + OuterRef, + Q, + Subquery, + Transform, + Value, +) +from django.db.models.expressions import RawSQL +from django.db.models.fields.json import ( + KT, + KeyTextTransform, + KeyTransform, + KeyTransformFactory, + KeyTransformTextLookupMixin, +) +from django.db.models.functions import Cast +from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature +from django.test.utils import CaptureQueriesContext +from django.utils.deprecation import RemovedInDjango51Warning + +from .models import CustomJSONDecoder, JSONModel, NullableJSONModel, RelatedJSONModel + + +@skipUnlessDBFeature("supports_json_field") +class JSONFieldTests(TestCase): + def test_invalid_value(self): + msg = "is not JSON serializable" + with self.assertRaisesMessage(TypeError, msg): + NullableJSONModel.objects.create( + value={ + "uuid": 
uuid.UUID("d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475"), + } + ) + + def test_custom_encoder_decoder(self): + value = {"uuid": uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")} + obj = NullableJSONModel(value_custom=value) + obj.clean_fields() + obj.save() + obj.refresh_from_db() + self.assertEqual(obj.value_custom, value) + + def test_db_check_constraints(self): + value = "{@!invalid json value 123 $!@#" + with mock.patch.object(DjangoJSONEncoder, "encode", return_value=value): + with self.assertRaises((IntegrityError, DataError, OperationalError)): + NullableJSONModel.objects.create(value_custom=value) + + +class TestMethods(SimpleTestCase): + def test_deconstruct(self): + field = models.JSONField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.JSONField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_deconstruct_custom_encoder_decoder(self): + field = models.JSONField(encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(kwargs["encoder"], DjangoJSONEncoder) + self.assertEqual(kwargs["decoder"], CustomJSONDecoder) + + def test_get_transforms(self): + @models.JSONField.register_lookup + class MyTransform(Transform): + lookup_name = "my_transform" + + field = models.JSONField() + transform = field.get_transform("my_transform") + self.assertIs(transform, MyTransform) + models.JSONField._unregister_lookup(MyTransform) + transform = field.get_transform("my_transform") + self.assertIsInstance(transform, KeyTransformFactory) + + def test_key_transform_text_lookup_mixin_non_key_transform(self): + transform = Transform("test") + msg = ( + "Transform should be an instance of KeyTransform in order to use " + "this lookup." 
+ ) + with self.assertRaisesMessage(TypeError, msg): + KeyTransformTextLookupMixin(transform) + + def test_get_prep_value(self): + class JSONFieldGetPrepValue(models.JSONField): + def get_prep_value(self, value): + if value is True: + return {"value": True} + return value + + def noop_adapt_json_value(value, encoder): + return value + + field = JSONFieldGetPrepValue() + with mock.patch.object( + connection.ops, "adapt_json_value", noop_adapt_json_value + ): + self.assertEqual( + field.get_db_prep_value(True, connection, prepared=False), + {"value": True}, + ) + self.assertIs( + field.get_db_prep_value(True, connection, prepared=True), True + ) + self.assertEqual(field.get_db_prep_value(1, connection, prepared=False), 1) + + +class TestValidation(SimpleTestCase): + def test_invalid_encoder(self): + msg = "The encoder parameter must be a callable object." + with self.assertRaisesMessage(ValueError, msg): + models.JSONField(encoder=DjangoJSONEncoder()) + + def test_invalid_decoder(self): + msg = "The decoder parameter must be a callable object." + with self.assertRaisesMessage(ValueError, msg): + models.JSONField(decoder=CustomJSONDecoder()) + + def test_validation_error(self): + field = models.JSONField() + msg = "Value must be valid JSON." 
+ value = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}") + with self.assertRaisesMessage(ValidationError, msg): + field.clean({"uuid": value}, None) + + def test_custom_encoder(self): + field = models.JSONField(encoder=DjangoJSONEncoder) + value = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}") + field.clean({"uuid": value}, None) + + +class TestFormField(SimpleTestCase): + def test_formfield(self): + model_field = models.JSONField() + form_field = model_field.formfield() + self.assertIsInstance(form_field, forms.JSONField) + + def test_formfield_custom_encoder_decoder(self): + model_field = models.JSONField( + encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder + ) + form_field = model_field.formfield() + self.assertIs(form_field.encoder, DjangoJSONEncoder) + self.assertIs(form_field.decoder, CustomJSONDecoder) + + +class TestSerialization(SimpleTestCase): + test_data = ( + '[{"fields": {"value": %s}, "model": "model_fields.jsonmodel", "pk": null}]' + ) + test_values = ( + # (Python value, serialized value), + ({"a": "b", "c": None}, '{"a": "b", "c": null}'), + ("abc", '"abc"'), + ('{"a": "a"}', '"{\\"a\\": \\"a\\"}"'), + ) + + def test_dumping(self): + for value, serialized in self.test_values: + with self.subTest(value=value): + instance = JSONModel(value=value) + data = serializers.serialize("json", [instance]) + self.assertJSONEqual(data, self.test_data % serialized) + + def test_loading(self): + for value, serialized in self.test_values: + with self.subTest(value=value): + instance = list( + serializers.deserialize("json", self.test_data % serialized) + )[0].object + self.assertEqual(instance.value, value) + + def test_xml_serialization(self): + test_xml_data = ( + '' + '' + '%s' + "" + ) + for value, serialized in self.test_values: + with self.subTest(value=value): + instance = NullableJSONModel(value=value) + data = serializers.serialize("xml", [instance], fields=["value"]) + self.assertXMLEqual(data, test_xml_data % serialized) + new_instance = 
list(serializers.deserialize("xml", data))[0].object + self.assertEqual(new_instance.value, instance.value) + + +@skipUnlessDBFeature("supports_json_field") +class TestSaveLoad(TestCase): + def test_null(self): + obj = NullableJSONModel(value=None) + obj.save() + obj.refresh_from_db() + self.assertIsNone(obj.value) + + def test_ambiguous_str_value_deprecation(self): + msg = ( + "Providing an encoded JSON string via Value() is deprecated. Use Value([], " + "output_field=JSONField()) instead." + ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + obj = NullableJSONModel.objects.create(value=Value("[]")) + obj.refresh_from_db() + self.assertEqual(obj.value, []) + + @skipUnlessDBFeature("supports_primitives_in_json_field") + def test_value_str_primitives_deprecation(self): + msg = ( + "Providing an encoded JSON string via Value() is deprecated. Use " + "Value(None, output_field=JSONField()) instead." + ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + obj = NullableJSONModel.objects.create(value=Value("null")) + obj.refresh_from_db() + self.assertIsNone(obj.value) + obj = NullableJSONModel.objects.create(value=Value("invalid-json")) + obj.refresh_from_db() + self.assertEqual(obj.value, "invalid-json") + + @skipUnlessDBFeature("supports_primitives_in_json_field") + def test_json_null_different_from_sql_null(self): + json_null = NullableJSONModel.objects.create(value=Value(None, JSONField())) + NullableJSONModel.objects.update(value=Value(None, JSONField())) + json_null.refresh_from_db() + sql_null = NullableJSONModel.objects.create(value=None) + sql_null.refresh_from_db() + # 'null' is not equal to NULL in the database. 
+ self.assertSequenceEqual( + NullableJSONModel.objects.filter(value=Value(None, JSONField())), + [json_null], + ) + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value=None), + [json_null], + ) + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__isnull=True), + [sql_null], + ) + # 'null' is equal to NULL in Python (None). + self.assertEqual(json_null.value, sql_null.value) + + @skipUnlessDBFeature("supports_primitives_in_json_field") + def test_primitives(self): + values = [ + True, + 1, + 1.45, + "String", + "", + ] + for value in values: + with self.subTest(value=value): + obj = JSONModel(value=value) + obj.save() + obj.refresh_from_db() + self.assertEqual(obj.value, value) + + def test_dict(self): + values = [ + {}, + {"name": "John", "age": 20, "height": 180.3}, + {"a": True, "b": {"b1": False, "b2": None}}, + ] + for value in values: + with self.subTest(value=value): + obj = JSONModel.objects.create(value=value) + obj.refresh_from_db() + self.assertEqual(obj.value, value) + + def test_list(self): + values = [ + [], + ["John", 20, 180.3], + [True, [False, None]], + ] + for value in values: + with self.subTest(value=value): + obj = JSONModel.objects.create(value=value) + obj.refresh_from_db() + self.assertEqual(obj.value, value) + + def test_realistic_object(self): + value = { + "name": "John", + "age": 20, + "pets": [ + {"name": "Kit", "type": "cat", "age": 2}, + {"name": "Max", "type": "dog", "age": 1}, + ], + "courses": [ + ["A1", "A2", "A3"], + ["B1", "B2"], + ["C1"], + ], + } + obj = JSONModel.objects.create(value=value) + obj.refresh_from_db() + self.assertEqual(obj.value, value) + + +@skipUnlessDBFeature("supports_json_field") +class TestQuerying(TestCase): + @classmethod + def setUpTestData(cls): + cls.primitives = [True, False, "yes", 7, 9.6] + values = [ + None, + [], + {}, + {"a": "b", "c": 14}, + { + "a": "b", + "c": 14, + "d": ["e", {"f": "g"}], + "h": True, + "i": False, + "j": None, + "k": {"l": "m"}, + "n": 
[None, True, False], + "o": '"quoted"', + "p": 4.2, + "r": {"s": True, "t": False}, + }, + [1, [2]], + {"k": True, "l": False, "foo": "bax"}, + { + "foo": "bar", + "baz": {"a": "b", "c": "d"}, + "bar": ["foo", "bar"], + "bax": {"foo": "bar"}, + }, + ] + cls.objs = [NullableJSONModel.objects.create(value=value) for value in values] + if connection.features.supports_primitives_in_json_field: + cls.objs.extend( + [ + NullableJSONModel.objects.create(value=value) + for value in cls.primitives + ] + ) + cls.raw_sql = "%s::jsonb" if connection.vendor == "postgresql" else "%s" + + def test_exact(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__exact={}), + [self.objs[2]], + ) + + def test_exact_complex(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__exact={"a": "b", "c": 14}), + [self.objs[3]], + ) + + def test_icontains(self): + self.assertCountEqual( + NullableJSONModel.objects.filter(value__icontains="BaX"), + self.objs[6:8], + ) + + def test_isnull(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__isnull=True), + [self.objs[0]], + ) + + def test_ordering_by_transform(self): + mariadb = connection.vendor == "mysql" and connection.mysql_is_mariadb + values = [ + {"ord": 93, "name": "bar"}, + {"ord": 22.1, "name": "foo"}, + {"ord": -1, "name": "baz"}, + {"ord": 21.931902, "name": "spam"}, + {"ord": -100291029, "name": "eggs"}, + ] + for field_name in ["value", "value_custom"]: + with self.subTest(field=field_name): + objs = [ + NullableJSONModel.objects.create(**{field_name: value}) + for value in values + ] + query = NullableJSONModel.objects.filter( + **{"%s__name__isnull" % field_name: False}, + ).order_by("%s__ord" % field_name) + expected = [objs[4], objs[2], objs[3], objs[1], objs[0]] + if mariadb or connection.vendor == "oracle": + # MariaDB and Oracle return JSON values as strings. 
+ expected = [objs[2], objs[4], objs[3], objs[1], objs[0]] + self.assertSequenceEqual(query, expected) + + def test_ordering_grouping_by_key_transform(self): + base_qs = NullableJSONModel.objects.filter(value__d__0__isnull=False) + for qs in ( + base_qs.order_by("value__d__0"), + base_qs.annotate( + key=KeyTransform("0", KeyTransform("d", "value")) + ).order_by("key"), + ): + self.assertSequenceEqual(qs, [self.objs[4]]) + none_val = "" if connection.features.interprets_empty_strings_as_nulls else None + qs = NullableJSONModel.objects.filter(value__isnull=False) + self.assertQuerySetEqual( + qs.filter(value__isnull=False) + .annotate(key=KT("value__d__1__f")) + .values("key") + .annotate(count=Count("key")) + .order_by("count"), + [(none_val, 0), ("g", 1)], + operator.itemgetter("key", "count"), + ) + + def test_ordering_grouping_by_count(self): + qs = ( + NullableJSONModel.objects.filter( + value__isnull=False, + ) + .values("value__d__0") + .annotate(count=Count("value__d__0")) + .order_by("count") + ) + self.assertQuerySetEqual(qs, [0, 1], operator.itemgetter("count")) + + def test_order_grouping_custom_decoder(self): + NullableJSONModel.objects.create(value_custom={"a": "b"}) + qs = NullableJSONModel.objects.filter(value_custom__isnull=False) + self.assertSequenceEqual( + qs.values( + "value_custom__a", + ) + .annotate( + count=Count("id"), + ) + .order_by("value_custom__a"), + [{"value_custom__a": "b", "count": 1}], + ) + + def test_key_transform_raw_expression(self): + expr = RawSQL(self.raw_sql, ['{"x": "bar"}']) + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__foo=KeyTransform("x", expr)), + [self.objs[7]], + ) + + def test_nested_key_transform_raw_expression(self): + expr = RawSQL(self.raw_sql, ['{"x": {"y": "bar"}}']) + self.assertSequenceEqual( + NullableJSONModel.objects.filter( + value__foo=KeyTransform("y", KeyTransform("x", expr)) + ), + [self.objs[7]], + ) + + def test_key_transform_expression(self): + self.assertSequenceEqual( + 
NullableJSONModel.objects.filter(value__d__0__isnull=False) + .annotate( + key=KeyTransform("d", "value"), + chain=KeyTransform("0", "key"), + expr=KeyTransform("0", Cast("key", models.JSONField())), + ) + .filter(chain=F("expr")), + [self.objs[4]], + ) + + def test_key_transform_annotation_expression(self): + obj = NullableJSONModel.objects.create(value={"d": ["e", "e"]}) + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__d__0__isnull=False) + .annotate( + key=F("value__d"), + chain=F("key__0"), + expr=Cast("key", models.JSONField()), + ) + .filter(chain=F("expr__1")), + [obj], + ) + + def test_nested_key_transform_expression(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__d__0__isnull=False) + .annotate( + key=KeyTransform("d", "value"), + chain=KeyTransform("f", KeyTransform("1", "key")), + expr=KeyTransform( + "f", KeyTransform("1", Cast("key", models.JSONField())) + ), + ) + .filter(chain=F("expr")), + [self.objs[4]], + ) + + def test_nested_key_transform_annotation_expression(self): + obj = NullableJSONModel.objects.create( + value={"d": ["e", {"f": "g"}, {"f": "g"}]}, + ) + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__d__0__isnull=False) + .annotate( + key=F("value__d"), + chain=F("key__1__f"), + expr=Cast("key", models.JSONField()), + ) + .filter(chain=F("expr__2__f")), + [obj], + ) + + def test_nested_key_transform_on_subquery(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__d__0__isnull=False) + .annotate( + subquery_value=Subquery( + NullableJSONModel.objects.filter(pk=OuterRef("pk")).values("value") + ), + key=KeyTransform("d", "subquery_value"), + chain=KeyTransform("f", KeyTransform("1", "key")), + ) + .filter(chain="g"), + [self.objs[4]], + ) + + def test_key_text_transform_char_lookup(self): + qs = NullableJSONModel.objects.annotate( + char_value=KeyTextTransform("foo", "value"), + ).filter(char_value__startswith="bar") + self.assertSequenceEqual(qs, 
[self.objs[7]]) + + qs = NullableJSONModel.objects.annotate( + char_value=KeyTextTransform(1, KeyTextTransform("bar", "value")), + ).filter(char_value__startswith="bar") + self.assertSequenceEqual(qs, [self.objs[7]]) + + def test_expression_wrapper_key_transform(self): + self.assertCountEqual( + NullableJSONModel.objects.annotate( + expr=ExpressionWrapper( + KeyTransform("c", "value"), + output_field=IntegerField(), + ), + ).filter(expr__isnull=False), + self.objs[3:5], + ) + + def test_has_key(self): + self.assertCountEqual( + NullableJSONModel.objects.filter(value__has_key="a"), + [self.objs[3], self.objs[4]], + ) + + def test_has_key_null_value(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__has_key="j"), + [self.objs[4]], + ) + + def test_has_key_deep(self): + tests = [ + (Q(value__baz__has_key="a"), self.objs[7]), + ( + Q(value__has_key=KeyTransform("a", KeyTransform("baz", "value"))), + self.objs[7], + ), + (Q(value__has_key=F("value__baz__a")), self.objs[7]), + ( + Q(value__has_key=KeyTransform("c", KeyTransform("baz", "value"))), + self.objs[7], + ), + (Q(value__has_key=F("value__baz__c")), self.objs[7]), + (Q(value__d__1__has_key="f"), self.objs[4]), + ( + Q( + value__has_key=KeyTransform( + "f", KeyTransform("1", KeyTransform("d", "value")) + ) + ), + self.objs[4], + ), + (Q(value__has_key=F("value__d__1__f")), self.objs[4]), + ] + for condition, expected in tests: + with self.subTest(condition=condition): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(condition), + [expected], + ) + + def test_has_key_list(self): + obj = NullableJSONModel.objects.create(value=[{"a": 1}, {"b": "x"}]) + tests = [ + Q(value__1__has_key="b"), + Q(value__has_key=KeyTransform("b", KeyTransform(1, "value"))), + Q(value__has_key=KeyTransform("b", KeyTransform("1", "value"))), + Q(value__has_key=F("value__1__b")), + ] + for condition in tests: + with self.subTest(condition=condition): + self.assertSequenceEqual( + 
NullableJSONModel.objects.filter(condition), + [obj], + ) + + def test_has_keys(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__has_keys=["a", "c", "h"]), + [self.objs[4]], + ) + + def test_has_any_keys(self): + self.assertCountEqual( + NullableJSONModel.objects.filter(value__has_any_keys=["c", "l"]), + [self.objs[3], self.objs[4], self.objs[6]], + ) + + def test_has_key_number(self): + obj = NullableJSONModel.objects.create( + value={ + "123": "value", + "nested": {"456": "bar", "lorem": "abc", "999": True}, + "array": [{"789": "baz", "777": "def", "ipsum": 200}], + "000": "val", + } + ) + tests = [ + Q(value__has_key="123"), + Q(value__nested__has_key="456"), + Q(value__array__0__has_key="789"), + Q(value__has_keys=["nested", "123", "array", "000"]), + Q(value__nested__has_keys=["lorem", "999", "456"]), + Q(value__array__0__has_keys=["789", "ipsum", "777"]), + Q(value__has_any_keys=["000", "nonexistent"]), + Q(value__nested__has_any_keys=["999", "nonexistent"]), + Q(value__array__0__has_any_keys=["777", "nonexistent"]), + ] + for condition in tests: + with self.subTest(condition=condition): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(condition), + [obj], + ) + + @skipUnlessDBFeature("supports_json_field_contains") + def test_contains(self): + tests = [ + ({}, self.objs[2:5] + self.objs[6:8]), + ({"baz": {"a": "b", "c": "d"}}, [self.objs[7]]), + ({"baz": {"a": "b"}}, [self.objs[7]]), + ({"baz": {"c": "d"}}, [self.objs[7]]), + ({"k": True, "l": False}, [self.objs[6]]), + ({"d": ["e", {"f": "g"}]}, [self.objs[4]]), + ({"d": ["e"]}, [self.objs[4]]), + ({"d": [{"f": "g"}]}, [self.objs[4]]), + ([1, [2]], [self.objs[5]]), + ([1], [self.objs[5]]), + ([[2]], [self.objs[5]]), + ({"n": [None, True, False]}, [self.objs[4]]), + ({"j": None}, [self.objs[4]]), + ] + for value, expected in tests: + with self.subTest(value=value): + qs = NullableJSONModel.objects.filter(value__contains=value) + self.assertCountEqual(qs, expected) + 
+ @skipIfDBFeature("supports_json_field_contains") + def test_contains_unsupported(self): + msg = "contains lookup is not supported on this database backend." + with self.assertRaisesMessage(NotSupportedError, msg): + NullableJSONModel.objects.filter( + value__contains={"baz": {"a": "b", "c": "d"}}, + ).get() + + @skipUnlessDBFeature( + "supports_primitives_in_json_field", + "supports_json_field_contains", + ) + def test_contains_primitives(self): + for value in self.primitives: + with self.subTest(value=value): + qs = NullableJSONModel.objects.filter(value__contains=value) + self.assertIs(qs.exists(), True) + + @skipUnlessDBFeature("supports_json_field_contains") + def test_contained_by(self): + qs = NullableJSONModel.objects.filter( + value__contained_by={"a": "b", "c": 14, "h": True} + ) + self.assertCountEqual(qs, self.objs[2:4]) + + @skipIfDBFeature("supports_json_field_contains") + def test_contained_by_unsupported(self): + msg = "contained_by lookup is not supported on this database backend." + with self.assertRaisesMessage(NotSupportedError, msg): + NullableJSONModel.objects.filter(value__contained_by={"a": "b"}).get() + + def test_deep_values(self): + qs = NullableJSONModel.objects.values_list("value__k__l").order_by("pk") + expected_objs = [(None,)] * len(self.objs) + expected_objs[4] = ("m",) + self.assertSequenceEqual(qs, expected_objs) + + @skipUnlessDBFeature("can_distinct_on_fields") + def test_deep_distinct(self): + query = NullableJSONModel.objects.distinct("value__k__l").values_list( + "value__k__l" + ) + self.assertSequenceEqual(query, [("m",), (None,)]) + + def test_isnull_key(self): + # key__isnull=False works the same as has_key='key'. 
+ self.assertCountEqual( + NullableJSONModel.objects.filter(value__a__isnull=True), + self.objs[:3] + self.objs[5:], + ) + self.assertCountEqual( + NullableJSONModel.objects.filter(value__j__isnull=True), + self.objs[:4] + self.objs[5:], + ) + self.assertCountEqual( + NullableJSONModel.objects.filter(value__a__isnull=False), + [self.objs[3], self.objs[4]], + ) + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__j__isnull=False), + [self.objs[4]], + ) + + def test_isnull_key_or_none(self): + obj = NullableJSONModel.objects.create(value={"a": None}) + self.assertCountEqual( + NullableJSONModel.objects.filter( + Q(value__a__isnull=True) | Q(value__a=None) + ), + self.objs[:3] + self.objs[5:] + [obj], + ) + + def test_none_key(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__j=None), + [self.objs[4]], + ) + + def test_none_key_exclude(self): + obj = NullableJSONModel.objects.create(value={"j": 1}) + if connection.vendor == "oracle": + # Oracle supports filtering JSON objects with NULL keys, but the + # current implementation doesn't support it. 
+ self.assertSequenceEqual( + NullableJSONModel.objects.exclude(value__j=None), + self.objs[1:4] + self.objs[5:] + [obj], + ) + else: + self.assertSequenceEqual( + NullableJSONModel.objects.exclude(value__j=None), [obj] + ) + + def test_shallow_list_lookup(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__0=1), + [self.objs[5]], + ) + + def test_shallow_obj_lookup(self): + self.assertCountEqual( + NullableJSONModel.objects.filter(value__a="b"), + [self.objs[3], self.objs[4]], + ) + + def test_obj_subquery_lookup(self): + qs = NullableJSONModel.objects.annotate( + field=Subquery( + NullableJSONModel.objects.filter(pk=OuterRef("pk")).values("value") + ), + ).filter(field__a="b") + self.assertCountEqual(qs, [self.objs[3], self.objs[4]]) + + def test_deep_lookup_objs(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__k__l="m"), + [self.objs[4]], + ) + + def test_shallow_lookup_obj_target(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__k={"l": "m"}), + [self.objs[4]], + ) + + def test_deep_lookup_array(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__1__0=2), + [self.objs[5]], + ) + + def test_deep_lookup_mixed(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__d__1__f="g"), + [self.objs[4]], + ) + + def test_deep_lookup_transform(self): + self.assertCountEqual( + NullableJSONModel.objects.filter(value__c__gt=2), + [self.objs[3], self.objs[4]], + ) + self.assertCountEqual( + NullableJSONModel.objects.filter(value__c__gt=2.33), + [self.objs[3], self.objs[4]], + ) + self.assertIs(NullableJSONModel.objects.filter(value__c__lt=5).exists(), False) + + def test_lookup_exclude(self): + tests = [ + (Q(value__a="b"), [self.objs[0]]), + (Q(value__foo="bax"), [self.objs[0], self.objs[7]]), + ] + for condition, expected in tests: + self.assertCountEqual( + NullableJSONModel.objects.exclude(condition), + expected, + ) + self.assertCountEqual( + 
NullableJSONModel.objects.filter(~condition), + expected, + ) + + def test_lookup_exclude_nonexistent_key(self): + # Values without the key are ignored. + condition = Q(value__foo="bax") + objs_with_value = [self.objs[6]] + objs_with_different_value = [self.objs[0], self.objs[7]] + self.assertCountEqual( + NullableJSONModel.objects.exclude(condition), + objs_with_different_value, + ) + self.assertSequenceEqual( + NullableJSONModel.objects.exclude(~condition), + objs_with_value, + ) + self.assertCountEqual( + NullableJSONModel.objects.filter(condition | ~condition), + objs_with_value + objs_with_different_value, + ) + self.assertCountEqual( + NullableJSONModel.objects.exclude(condition & ~condition), + objs_with_value + objs_with_different_value, + ) + # Add the __isnull lookup to get an exhaustive set. + self.assertCountEqual( + NullableJSONModel.objects.exclude(condition & Q(value__foo__isnull=False)), + self.objs[0:6] + self.objs[7:], + ) + self.assertSequenceEqual( + NullableJSONModel.objects.filter(condition & Q(value__foo__isnull=False)), + objs_with_value, + ) + + def test_usage_in_subquery(self): + self.assertCountEqual( + NullableJSONModel.objects.filter( + id__in=NullableJSONModel.objects.filter(value__c=14), + ), + self.objs[3:5], + ) + + @skipUnlessDBFeature("supports_json_field_contains") + def test_array_key_contains(self): + tests = [ + ([], [self.objs[7]]), + ("bar", [self.objs[7]]), + (["bar"], [self.objs[7]]), + ("ar", []), + ] + for value, expected in tests: + with self.subTest(value=value): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__bar__contains=value), + expected, + ) + + def test_key_iexact(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__iexact="BaR").exists(), True + ) + self.assertIs( + NullableJSONModel.objects.filter(value__foo__iexact='"BaR"').exists(), False + ) + + def test_key_in(self): + tests = [ + ("value__c__in", [14], self.objs[3:5]), + ("value__c__in", [14, 15], self.objs[3:5]), + 
("value__0__in", [1], [self.objs[5]]), + ("value__0__in", [1, 3], [self.objs[5]]), + ("value__foo__in", ["bar"], [self.objs[7]]), + ( + "value__foo__in", + [KeyTransform("foo", KeyTransform("bax", "value"))], + [self.objs[7]], + ), + ("value__foo__in", [F("value__bax__foo")], [self.objs[7]]), + ( + "value__foo__in", + [KeyTransform("foo", KeyTransform("bax", "value")), "baz"], + [self.objs[7]], + ), + ("value__foo__in", [F("value__bax__foo"), "baz"], [self.objs[7]]), + ("value__foo__in", ["bar", "baz"], [self.objs[7]]), + ("value__bar__in", [["foo", "bar"]], [self.objs[7]]), + ("value__bar__in", [["foo", "bar"], ["a"]], [self.objs[7]]), + ("value__bax__in", [{"foo": "bar"}, {"a": "b"}], [self.objs[7]]), + ("value__h__in", [True, "foo"], [self.objs[4]]), + ("value__i__in", [False, "foo"], [self.objs[4]]), + ] + for lookup, value, expected in tests: + with self.subTest(lookup=lookup, value=value): + self.assertCountEqual( + NullableJSONModel.objects.filter(**{lookup: value}), + expected, + ) + + def test_key_values(self): + qs = NullableJSONModel.objects.filter(value__h=True) + tests = [ + ("value__a", "b"), + ("value__c", 14), + ("value__d", ["e", {"f": "g"}]), + ("value__h", True), + ("value__i", False), + ("value__j", None), + ("value__k", {"l": "m"}), + ("value__n", [None, True, False]), + ("value__p", 4.2), + ("value__r", {"s": True, "t": False}), + ] + for lookup, expected in tests: + with self.subTest(lookup=lookup): + self.assertEqual(qs.values_list(lookup, flat=True).get(), expected) + + def test_key_values_boolean(self): + qs = NullableJSONModel.objects.filter(value__h=True, value__i=False) + tests = [ + ("value__h", True), + ("value__i", False), + ] + for lookup, expected in tests: + with self.subTest(lookup=lookup): + self.assertIs(qs.values_list(lookup, flat=True).get(), expected) + + @skipUnlessDBFeature("supports_json_field_contains") + def test_key_contains(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__contains="ar").exists(), 
False + ) + self.assertIs( + NullableJSONModel.objects.filter(value__foo__contains="bar").exists(), True + ) + + def test_key_icontains(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__icontains="Ar").exists(), True + ) + + def test_key_startswith(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__startswith="b").exists(), True + ) + + def test_key_istartswith(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__istartswith="B").exists(), True + ) + + def test_key_endswith(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__endswith="r").exists(), True + ) + + def test_key_iendswith(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__iendswith="R").exists(), True + ) + + def test_key_regex(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__regex=r"^bar$").exists(), True + ) + + def test_key_iregex(self): + self.assertIs( + NullableJSONModel.objects.filter(value__foo__iregex=r"^bAr$").exists(), True + ) + + def test_key_quoted_string(self): + self.assertEqual( + NullableJSONModel.objects.filter(value__o='"quoted"').get(), + self.objs[4], + ) + + @skipUnlessDBFeature("has_json_operators") + def test_key_sql_injection(self): + with CaptureQueriesContext(connection) as queries: + self.assertIs( + NullableJSONModel.objects.filter( + **{ + """value__test' = '"a"') OR 1 = 1 OR ('d""": "x", + } + ).exists(), + False, + ) + self.assertIn( + """."value" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"'""", + queries[0]["sql"], + ) + + @skipIfDBFeature("has_json_operators") + def test_key_sql_injection_escape(self): + query = str( + JSONModel.objects.filter( + **{ + """value__test") = '"a"' OR 1 = 1 OR ("d""": "x", + } + ).query + ) + self.assertIn('"test\\"', query) + self.assertIn('\\"d', query) + + def test_key_escape(self): + obj = NullableJSONModel.objects.create(value={"%total": 10}) + self.assertEqual( + 
NullableJSONModel.objects.filter(**{"value__%total": 10}).get(), obj + ) + + def test_none_key_and_exact_lookup(self): + self.assertSequenceEqual( + NullableJSONModel.objects.filter(value__a="b", value__j=None), + [self.objs[4]], + ) + + def test_lookups_with_key_transform(self): + tests = ( + ("value__baz__has_key", "c"), + ("value__baz__has_keys", ["a", "c"]), + ("value__baz__has_any_keys", ["a", "x"]), + ("value__has_key", KeyTextTransform("foo", "value")), + ) + for lookup, value in tests: + with self.subTest(lookup=lookup): + self.assertIs( + NullableJSONModel.objects.filter( + **{lookup: value}, + ).exists(), + True, + ) + + @skipUnlessDBFeature("supports_json_field_contains") + def test_contains_contained_by_with_key_transform(self): + tests = [ + ("value__d__contains", "e"), + ("value__d__contains", [{"f": "g"}]), + ("value__contains", KeyTransform("bax", "value")), + ("value__contains", F("value__bax")), + ("value__baz__contains", {"a": "b"}), + ("value__baz__contained_by", {"a": "b", "c": "d", "e": "f"}), + ( + "value__contained_by", + KeyTransform( + "x", + RawSQL( + self.raw_sql, + ['{"x": {"a": "b", "c": 1, "d": "e"}}'], + ), + ), + ), + ] + # For databases where {'f': 'g'} (without surrounding []) matches + # [{'f': 'g'}]. 
+ if not connection.features.json_key_contains_list_matching_requires_list: + tests.append(("value__d__contains", {"f": "g"})) + for lookup, value in tests: + with self.subTest(lookup=lookup, value=value): + self.assertIs( + NullableJSONModel.objects.filter( + **{lookup: value}, + ).exists(), + True, + ) + + def test_join_key_transform_annotation_expression(self): + related_obj = RelatedJSONModel.objects.create( + value={"d": ["f", "e"]}, + json_model=self.objs[4], + ) + RelatedJSONModel.objects.create( + value={"d": ["e", "f"]}, + json_model=self.objs[4], + ) + self.assertSequenceEqual( + RelatedJSONModel.objects.annotate( + key=F("value__d"), + related_key=F("json_model__value__d"), + chain=F("key__1"), + expr=Cast("key", models.JSONField()), + ).filter(chain=F("related_key__0")), + [related_obj], + ) + + def test_key_text_transform_from_lookup(self): + qs = NullableJSONModel.objects.annotate(b=KT("value__bax__foo")).filter( + b__contains="ar", + ) + self.assertSequenceEqual(qs, [self.objs[7]]) + qs = NullableJSONModel.objects.annotate(c=KT("value__o")).filter( + c__contains="uot", + ) + self.assertSequenceEqual(qs, [self.objs[4]]) + + def test_key_text_transform_from_lookup_invalid(self): + msg = "Lookup must contain key or index transforms." 
+ with self.assertRaisesMessage(ValueError, msg): + KT("value") + with self.assertRaisesMessage(ValueError, msg): + KT("") diff --git a/testbed/django__django/tests/model_fields/tests.py b/testbed/django__django/tests/model_fields/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..fe8526a4800b91e3b2f166ddf436d2584ac493fa --- /dev/null +++ b/testbed/django__django/tests/model_fields/tests.py @@ -0,0 +1,376 @@ +import pickle + +from django import forms +from django.core.exceptions import ValidationError +from django.db import models +from django.test import SimpleTestCase, TestCase +from django.utils.functional import lazy + +from .models import ( + Bar, + Choiceful, + Foo, + RenamedField, + VerboseNameField, + Whiz, + WhizDelayed, + WhizIter, + WhizIterEmpty, +) + + +class Nested: + class Field(models.Field): + pass + + +class BasicFieldTests(SimpleTestCase): + def test_show_hidden_initial(self): + """ + Fields with choices respect show_hidden_initial as a kwarg to + formfield(). + """ + choices = [(0, 0), (1, 1)] + model_field = models.Field(choices=choices) + form_field = model_field.formfield(show_hidden_initial=True) + self.assertTrue(form_field.show_hidden_initial) + + form_field = model_field.formfield(show_hidden_initial=False) + self.assertFalse(form_field.show_hidden_initial) + + def test_field_repr(self): + """ + __repr__() of a field displays its name. + """ + f = Foo._meta.get_field("a") + self.assertEqual(repr(f), "") + f = models.fields.CharField() + self.assertEqual(repr(f), "") + + def test_field_repr_nested(self): + """__repr__() uses __qualname__ for nested class support.""" + self.assertEqual(repr(Nested.Field()), "") + + def test_field_name(self): + """ + A defined field name (name="fieldname") is used instead of the model + model's attribute name (modelname). 
+ """ + instance = RenamedField() + self.assertTrue(hasattr(instance, "get_fieldname_display")) + self.assertFalse(hasattr(instance, "get_modelname_display")) + + def test_field_verbose_name(self): + m = VerboseNameField + for i in range(1, 22): + self.assertEqual( + m._meta.get_field("field%d" % i).verbose_name, "verbose field%d" % i + ) + + self.assertEqual(m._meta.get_field("id").verbose_name, "verbose pk") + + def test_choices_form_class(self): + """Can supply a custom choices form class to Field.formfield()""" + choices = [("a", "a")] + field = models.CharField(choices=choices) + klass = forms.TypedMultipleChoiceField + self.assertIsInstance(field.formfield(choices_form_class=klass), klass) + + def test_formfield_disabled(self): + """Field.formfield() sets disabled for fields with choices.""" + field = models.CharField(choices=[("a", "b")]) + form_field = field.formfield(disabled=True) + self.assertIs(form_field.disabled, True) + + def test_field_str(self): + f = models.Field() + self.assertEqual(str(f), "") + f = Foo._meta.get_field("a") + self.assertEqual(str(f), "model_fields.Foo.a") + + def test_field_ordering(self): + """Fields are ordered based on their creation.""" + f1 = models.Field() + f2 = models.Field(auto_created=True) + f3 = models.Field() + self.assertLess(f2, f1) + self.assertGreater(f3, f1) + self.assertIsNotNone(f1) + self.assertNotIn(f2, (None, 1, "")) + + def test_field_instance_is_picklable(self): + """Field instances can be pickled.""" + field = models.Field(max_length=100, default="a string") + # Must be picklable with this cached property populated (#28188). 
+ field._get_default + pickle.dumps(field) + + def test_deconstruct_nested_field(self): + """deconstruct() uses __qualname__ for nested class support.""" + name, path, args, kwargs = Nested.Field().deconstruct() + self.assertEqual(path, "model_fields.tests.Nested.Field") + + def test_abstract_inherited_fields(self): + """Field instances from abstract models are not equal.""" + + class AbstractModel(models.Model): + field = models.IntegerField() + + class Meta: + abstract = True + + class InheritAbstractModel1(AbstractModel): + pass + + class InheritAbstractModel2(AbstractModel): + pass + + abstract_model_field = AbstractModel._meta.get_field("field") + inherit1_model_field = InheritAbstractModel1._meta.get_field("field") + inherit2_model_field = InheritAbstractModel2._meta.get_field("field") + + self.assertNotEqual(abstract_model_field, inherit1_model_field) + self.assertNotEqual(abstract_model_field, inherit2_model_field) + self.assertNotEqual(inherit1_model_field, inherit2_model_field) + + self.assertLess(abstract_model_field, inherit1_model_field) + self.assertLess(abstract_model_field, inherit2_model_field) + self.assertLess(inherit1_model_field, inherit2_model_field) + + def test_hash_immutability(self): + field = models.IntegerField() + field_hash = hash(field) + + class MyModel(models.Model): + rank = field + + self.assertEqual(field_hash, hash(field)) + + +class ChoicesTests(SimpleTestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.no_choices = Choiceful._meta.get_field("no_choices") + cls.empty_choices = Choiceful._meta.get_field("empty_choices") + cls.empty_choices_bool = Choiceful._meta.get_field("empty_choices_bool") + cls.empty_choices_text = Choiceful._meta.get_field("empty_choices_text") + cls.with_choices = Choiceful._meta.get_field("with_choices") + cls.choices_from_enum = Choiceful._meta.get_field("choices_from_enum") + + def test_choices(self): + self.assertIsNone(self.no_choices.choices) + 
self.assertEqual(self.empty_choices.choices, ()) + self.assertEqual(self.with_choices.choices, [(1, "A")]) + + def test_flatchoices(self): + self.assertEqual(self.no_choices.flatchoices, []) + self.assertEqual(self.empty_choices.flatchoices, []) + self.assertEqual(self.with_choices.flatchoices, [(1, "A")]) + + def test_check(self): + self.assertEqual(Choiceful.check(), []) + + def test_invalid_choice(self): + model_instance = None # Actual model instance not needed. + self.no_choices.validate(0, model_instance) + msg = "['Value 99 is not a valid choice.']" + with self.assertRaisesMessage(ValidationError, msg): + self.empty_choices.validate(99, model_instance) + with self.assertRaisesMessage(ValidationError, msg): + self.with_choices.validate(99, model_instance) + + def test_formfield(self): + no_choices_formfield = self.no_choices.formfield() + self.assertIsInstance(no_choices_formfield, forms.IntegerField) + fields = ( + self.empty_choices, + self.with_choices, + self.empty_choices_bool, + self.empty_choices_text, + ) + for field in fields: + with self.subTest(field=field): + self.assertIsInstance(field.formfield(), forms.ChoiceField) + + def test_choices_from_enum(self): + # Choices class was transparently resolved when given as argument. + self.assertEqual(self.choices_from_enum.choices, Choiceful.Suit.choices) + + +class GetFieldDisplayTests(SimpleTestCase): + def test_choices_and_field_display(self): + """ + get_choices() interacts with get_FIELD_display() to return the expected + values. 
+ """ + self.assertEqual(Whiz(c=1).get_c_display(), "First") # A nested value + self.assertEqual(Whiz(c=0).get_c_display(), "Other") # A top level value + self.assertEqual(Whiz(c=9).get_c_display(), 9) # Invalid value + self.assertIsNone(Whiz(c=None).get_c_display()) # Blank value + self.assertEqual(Whiz(c="").get_c_display(), "") # Empty value + self.assertEqual(WhizDelayed(c=0).get_c_display(), "Other") # Delayed choices + + def test_get_FIELD_display_translated(self): + """A translated display value is coerced to str.""" + val = Whiz(c=5).get_c_display() + self.assertIsInstance(val, str) + self.assertEqual(val, "translated") + + def test_overriding_FIELD_display(self): + class FooBar(models.Model): + foo_bar = models.IntegerField(choices=[(1, "foo"), (2, "bar")]) + + def get_foo_bar_display(self): + return "something" + + f = FooBar(foo_bar=1) + self.assertEqual(f.get_foo_bar_display(), "something") + + def test_overriding_inherited_FIELD_display(self): + class Base(models.Model): + foo = models.CharField(max_length=254, choices=[("A", "Base A")]) + + class Meta: + abstract = True + + class Child(Base): + foo = models.CharField( + max_length=254, choices=[("A", "Child A"), ("B", "Child B")] + ) + + self.assertEqual(Child(foo="A").get_foo_display(), "Child A") + self.assertEqual(Child(foo="B").get_foo_display(), "Child B") + + def test_iterator_choices(self): + """ + get_choices() works with Iterators. + """ + self.assertEqual(WhizIter(c=1).c, 1) # A nested value + self.assertEqual(WhizIter(c=9).c, 9) # Invalid value + self.assertIsNone(WhizIter(c=None).c) # Blank value + self.assertEqual(WhizIter(c="").c, "") # Empty value + + def test_empty_iterator_choices(self): + """ + get_choices() works with empty iterators. 
+ """ + self.assertEqual(WhizIterEmpty(c="a").c, "a") # A nested value + self.assertEqual(WhizIterEmpty(c="b").c, "b") # Invalid value + self.assertIsNone(WhizIterEmpty(c=None).c) # Blank value + self.assertEqual(WhizIterEmpty(c="").c, "") # Empty value + + +class GetChoicesTests(SimpleTestCase): + def test_empty_choices(self): + choices = [] + f = models.CharField(choices=choices) + self.assertEqual(f.get_choices(include_blank=False), choices) + + def test_blank_in_choices(self): + choices = [("", "<><>"), ("a", "A")] + f = models.CharField(choices=choices) + self.assertEqual(f.get_choices(include_blank=True), choices) + + def test_blank_in_grouped_choices(self): + choices = [ + ("f", "Foo"), + ("b", "Bar"), + ( + "Group", + ( + ("", "No Preference"), + ("fg", "Foo"), + ("bg", "Bar"), + ), + ), + ] + f = models.CharField(choices=choices) + self.assertEqual(f.get_choices(include_blank=True), choices) + + def test_lazy_strings_not_evaluated(self): + lazy_func = lazy(lambda x: 0 / 0, int) # raises ZeroDivisionError if evaluated. 
+ f = models.CharField(choices=[(lazy_func("group"), (("a", "A"), ("b", "B")))]) + self.assertEqual(f.get_choices(include_blank=True)[0], ("", "---------")) + + +class GetChoicesOrderingTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.foo1 = Foo.objects.create(a="a", d="12.35") + cls.foo2 = Foo.objects.create(a="b", d="12.34") + cls.bar1 = Bar.objects.create(a=cls.foo1, b="b") + cls.bar2 = Bar.objects.create(a=cls.foo2, b="a") + cls.field = Bar._meta.get_field("a") + + def assertChoicesEqual(self, choices, objs): + self.assertEqual(choices, [(obj.pk, str(obj)) for obj in objs]) + + def test_get_choices(self): + self.assertChoicesEqual( + self.field.get_choices(include_blank=False, ordering=("a",)), + [self.foo1, self.foo2], + ) + self.assertChoicesEqual( + self.field.get_choices(include_blank=False, ordering=("-a",)), + [self.foo2, self.foo1], + ) + + def test_get_choices_default_ordering(self): + self.addCleanup(setattr, Foo._meta, "ordering", Foo._meta.ordering) + Foo._meta.ordering = ("d",) + self.assertChoicesEqual( + self.field.get_choices(include_blank=False), [self.foo2, self.foo1] + ) + + def test_get_choices_reverse_related_field(self): + self.assertChoicesEqual( + self.field.remote_field.get_choices(include_blank=False, ordering=("a",)), + [self.bar1, self.bar2], + ) + self.assertChoicesEqual( + self.field.remote_field.get_choices(include_blank=False, ordering=("-a",)), + [self.bar2, self.bar1], + ) + + def test_get_choices_reverse_related_field_default_ordering(self): + self.addCleanup(setattr, Bar._meta, "ordering", Bar._meta.ordering) + Bar._meta.ordering = ("b",) + self.assertChoicesEqual( + self.field.remote_field.get_choices(include_blank=False), + [self.bar2, self.bar1], + ) + + +class GetChoicesLimitChoicesToTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.foo1 = Foo.objects.create(a="a", d="12.34") + cls.foo2 = Foo.objects.create(a="b", d="12.34") + cls.bar1 = Bar.objects.create(a=cls.foo1, b="b") + cls.bar2 = 
Bar.objects.create(a=cls.foo2, b="a") + cls.field = Bar._meta.get_field("a") + + def assertChoicesEqual(self, choices, objs): + self.assertCountEqual(choices, [(obj.pk, str(obj)) for obj in objs]) + + def test_get_choices(self): + self.assertChoicesEqual( + self.field.get_choices(include_blank=False, limit_choices_to={"a": "a"}), + [self.foo1], + ) + self.assertChoicesEqual( + self.field.get_choices(include_blank=False, limit_choices_to={}), + [self.foo1, self.foo2], + ) + + def test_get_choices_reverse_related_field(self): + field = self.field.remote_field + self.assertChoicesEqual( + field.get_choices(include_blank=False, limit_choices_to={"b": "b"}), + [self.bar1], + ) + self.assertChoicesEqual( + field.get_choices(include_blank=False, limit_choices_to={}), + [self.bar1, self.bar2], + ) diff --git a/testbed/django__django/tests/model_forms/__init__.py b/testbed/django__django/tests/model_forms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_forms/models.py b/testbed/django__django/tests/model_forms/models.py new file mode 100644 index 0000000000000000000000000000000000000000..b6da15f48ac8964843361a698cf4de6c0c8d901c --- /dev/null +++ b/testbed/django__django/tests/model_forms/models.py @@ -0,0 +1,523 @@ +import datetime +import os +import tempfile +import uuid + +from django.core import validators +from django.core.exceptions import ValidationError +from django.core.files.storage import FileSystemStorage +from django.db import models + +temp_storage_dir = tempfile.mkdtemp() +temp_storage = FileSystemStorage(temp_storage_dir) + + +class Person(models.Model): + name = models.CharField(max_length=100) + + +class Category(models.Model): + name = models.CharField(max_length=20) + slug = models.SlugField(max_length=20) + url = models.CharField("The URL", max_length=40) + + class Meta: + ordering = ("pk",) + + def __str__(self): + return self.name + 
+ def __repr__(self): + return self.__str__() + + +class WriterManager(models.Manager): + def get_queryset(self): + qs = super().get_queryset() + return qs.filter(archived=False) + + +class Writer(models.Model): + name = models.CharField(max_length=50, help_text="Use both first and last names.") + archived = models.BooleanField(default=False, editable=False) + + objects = WriterManager() + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + +class Article(models.Model): + ARTICLE_STATUS = ( + (1, "Draft"), + (2, "Pending"), + (3, "Live"), + ) + headline = models.CharField(max_length=50) + slug = models.SlugField() + pub_date = models.DateField() + created = models.DateField(editable=False) + writer = models.ForeignKey(Writer, models.CASCADE) + article = models.TextField() + categories = models.ManyToManyField(Category, blank=True) + status = models.PositiveIntegerField(choices=ARTICLE_STATUS, blank=True, null=True) + + def save(self, *args, **kwargs): + if not self.id: + self.created = datetime.date.today() + return super().save(*args, **kwargs) + + def __str__(self): + return self.headline + + +class ImprovedArticle(models.Model): + article = models.OneToOneField(Article, models.CASCADE) + + +class ImprovedArticleWithParentLink(models.Model): + article = models.OneToOneField(Article, models.CASCADE, parent_link=True) + + +class BetterWriter(Writer): + score = models.IntegerField() + + +class Publication(models.Model): + title = models.CharField(max_length=30) + date_published = models.DateField() + + def __str__(self): + return self.title + + +def default_mode(): + return "di" + + +def default_category(): + return 3 + + +class PublicationDefaults(models.Model): + MODE_CHOICES = (("di", "direct"), ("de", "delayed")) + CATEGORY_CHOICES = ((1, "Games"), (2, "Comics"), (3, "Novel")) + title = models.CharField(max_length=30) + date_published = models.DateField(default=datetime.date.today) + datetime_published = 
models.DateTimeField(default=datetime.datetime(2000, 1, 1)) + mode = models.CharField(max_length=2, choices=MODE_CHOICES, default=default_mode) + category = models.IntegerField(choices=CATEGORY_CHOICES, default=default_category) + active = models.BooleanField(default=True) + file = models.FileField(default="default.txt") + + +class Author(models.Model): + publication = models.OneToOneField( + Publication, models.SET_NULL, null=True, blank=True + ) + full_name = models.CharField(max_length=255) + + +class Author1(models.Model): + publication = models.OneToOneField(Publication, models.CASCADE, null=False) + full_name = models.CharField(max_length=255) + + +class WriterProfile(models.Model): + writer = models.OneToOneField(Writer, models.CASCADE, primary_key=True) + age = models.PositiveIntegerField() + + def __str__(self): + return "%s is %s" % (self.writer, self.age) + + +class Document(models.Model): + myfile = models.FileField(upload_to="unused", blank=True) + + +class TextFile(models.Model): + description = models.CharField(max_length=20) + file = models.FileField(storage=temp_storage, upload_to="tests", max_length=15) + + def __str__(self): + return self.description + + +class CustomFileField(models.FileField): + def save_form_data(self, instance, data): + been_here = getattr(self, "been_saved", False) + assert not been_here, "save_form_data called more than once" + setattr(self, "been_saved", True) + + +class CustomFF(models.Model): + f = CustomFileField(upload_to="unused", blank=True) + + +class FilePathModel(models.Model): + path = models.FilePathField( + path=os.path.dirname(__file__), match="models.py", blank=True + ) + + +try: + from PIL import Image # NOQA: detect if Pillow is installed + + test_images = True + + class ImageFile(models.Model): + def custom_upload_path(self, filename): + path = self.path or "tests" + return "%s/%s" % (path, filename) + + description = models.CharField(max_length=20) + + # Deliberately put the image field *after* the 
width/height fields to + # trigger the bug in #10404 with width/height not getting assigned. + width = models.IntegerField(editable=False) + height = models.IntegerField(editable=False) + image = models.ImageField( + storage=temp_storage, + upload_to=custom_upload_path, + width_field="width", + height_field="height", + ) + path = models.CharField(max_length=16, blank=True, default="") + + def __str__(self): + return self.description + + class OptionalImageFile(models.Model): + def custom_upload_path(self, filename): + path = self.path or "tests" + return "%s/%s" % (path, filename) + + description = models.CharField(max_length=20) + image = models.ImageField( + storage=temp_storage, + upload_to=custom_upload_path, + width_field="width", + height_field="height", + blank=True, + null=True, + ) + width = models.IntegerField(editable=False, null=True) + height = models.IntegerField(editable=False, null=True) + path = models.CharField(max_length=16, blank=True, default="") + + def __str__(self): + return self.description + + class NoExtensionImageFile(models.Model): + def upload_to(self, filename): + return "tests/no_extension" + + description = models.CharField(max_length=20) + image = models.ImageField(storage=temp_storage, upload_to=upload_to) + + def __str__(self): + return self.description + +except ImportError: + test_images = False + + +class Homepage(models.Model): + url = models.URLField() + + +class Product(models.Model): + slug = models.SlugField(unique=True) + + def __str__(self): + return self.slug + + +class Price(models.Model): + price = models.DecimalField(max_digits=10, decimal_places=2) + quantity = models.PositiveIntegerField() + + class Meta: + unique_together = (("price", "quantity"),) + + def __str__(self): + return "%s for %s" % (self.quantity, self.price) + + +class Triple(models.Model): + left = models.IntegerField() + middle = models.IntegerField() + right = models.IntegerField() + + class Meta: + unique_together = (("left", "middle"), 
("middle", "right")) + + +class ArticleStatus(models.Model): + ARTICLE_STATUS_CHAR = ( + ("d", "Draft"), + ("p", "Pending"), + ("l", "Live"), + ) + status = models.CharField( + max_length=2, choices=ARTICLE_STATUS_CHAR, blank=True, null=True + ) + + +class Inventory(models.Model): + barcode = models.PositiveIntegerField(unique=True) + parent = models.ForeignKey( + "self", models.SET_NULL, to_field="barcode", blank=True, null=True + ) + name = models.CharField(blank=False, max_length=20) + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + def __repr__(self): + return self.__str__() + + +class Book(models.Model): + title = models.CharField(max_length=40) + author = models.ForeignKey(Writer, models.SET_NULL, blank=True, null=True) + special_id = models.IntegerField(blank=True, null=True, unique=True) + + class Meta: + unique_together = ("title", "author") + + +class BookXtra(models.Model): + isbn = models.CharField(max_length=16, unique=True) + suffix1 = models.IntegerField(blank=True, default=0) + suffix2 = models.IntegerField(blank=True, default=0) + + class Meta: + unique_together = ("suffix1", "suffix2") + abstract = True + + +class DerivedBook(Book, BookXtra): + pass + + +class ExplicitPK(models.Model): + key = models.CharField(max_length=20, primary_key=True) + desc = models.CharField(max_length=20, blank=True, unique=True) + + class Meta: + unique_together = ("key", "desc") + + def __str__(self): + return self.key + + +class Post(models.Model): + title = models.CharField(max_length=50, unique_for_date="posted", blank=True) + slug = models.CharField(max_length=50, unique_for_year="posted", blank=True) + subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True) + posted = models.DateField() + + def __str__(self): + return self.title + + +class DateTimePost(models.Model): + title = models.CharField(max_length=50, unique_for_date="posted", blank=True) + slug = models.CharField(max_length=50, 
unique_for_year="posted", blank=True) + subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True) + posted = models.DateTimeField(editable=False) + + def __str__(self): + return self.title + + +class DerivedPost(Post): + pass + + +class BigInt(models.Model): + biggie = models.BigIntegerField() + + def __str__(self): + return str(self.biggie) + + +class MarkupField(models.CharField): + def __init__(self, *args, **kwargs): + kwargs["max_length"] = 20 + super().__init__(*args, **kwargs) + + def formfield(self, **kwargs): + # don't allow this field to be used in form (real use-case might be + # that you know the markup will always be X, but it is among an app + # that allows the user to say it could be something else) + # regressed at r10062 + return None + + +class CustomFieldForExclusionModel(models.Model): + name = models.CharField(max_length=10) + markup = MarkupField() + + +class FlexibleDatePost(models.Model): + title = models.CharField(max_length=50, unique_for_date="posted", blank=True) + slug = models.CharField(max_length=50, unique_for_year="posted", blank=True) + subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True) + posted = models.DateField(blank=True, null=True) + + +class Colour(models.Model): + name = models.CharField(max_length=50) + + def __iter__(self): + yield from range(5) + + def __str__(self): + return self.name + + +class ColourfulItem(models.Model): + name = models.CharField(max_length=50) + colours = models.ManyToManyField(Colour) + + +class CustomErrorMessage(models.Model): + name1 = models.CharField( + max_length=50, + validators=[validators.validate_slug], + error_messages={"invalid": "Model custom error message."}, + ) + name2 = models.CharField( + max_length=50, + validators=[validators.validate_slug], + error_messages={"invalid": "Model custom error message."}, + ) + + def clean(self): + if self.name1 == "FORBIDDEN_VALUE": + raise ValidationError( + {"name1": 
[ValidationError("Model.clean() error messages.")]} + ) + elif self.name1 == "FORBIDDEN_VALUE2": + raise ValidationError( + {"name1": "Model.clean() error messages (simpler syntax)."} + ) + elif self.name1 == "GLOBAL_ERROR": + raise ValidationError("Global error message.") + + +def today_callable_dict(): + return {"last_action__gte": datetime.datetime.today()} + + +def today_callable_q(): + return models.Q(last_action__gte=datetime.datetime.today()) + + +class Character(models.Model): + username = models.CharField(max_length=100) + last_action = models.DateTimeField() + + def __str__(self): + return self.username + + +class StumpJoke(models.Model): + most_recently_fooled = models.ForeignKey( + Character, + models.CASCADE, + limit_choices_to=today_callable_dict, + related_name="jokes", + ) + has_fooled_today = models.ManyToManyField( + Character, + limit_choices_to=today_callable_q, + related_name="jokes_today", + ) + funny = models.BooleanField(default=False) + + +# Model for #13776 +class Student(models.Model): + character = models.ForeignKey(Character, models.CASCADE) + study = models.CharField(max_length=30) + + +# Model for #639 +class Photo(models.Model): + title = models.CharField(max_length=30) + image = models.FileField(storage=temp_storage, upload_to="tests") + + # Support code for the tests; this keeps track of how many times save() + # gets called on each instance. 
+ def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._savecount = 0 + + def save(self, force_insert=False, force_update=False): + super().save(force_insert, force_update) + self._savecount += 1 + + +class UUIDPK(models.Model): + uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + name = models.CharField(max_length=30) + + +# Models for #24706 +class StrictAssignmentFieldSpecific(models.Model): + title = models.CharField(max_length=30) + _should_error = False + + def __setattr__(self, key, value): + if self._should_error is True: + raise ValidationError(message={key: "Cannot set attribute"}, code="invalid") + super().__setattr__(key, value) + + +class StrictAssignmentAll(models.Model): + title = models.CharField(max_length=30) + _should_error = False + + def __setattr__(self, key, value): + if self._should_error is True: + raise ValidationError(message="Cannot set attribute", code="invalid") + super().__setattr__(key, value) + + +# A model with ForeignKey(blank=False, null=True) +class Award(models.Model): + name = models.CharField(max_length=30) + character = models.ForeignKey(Character, models.SET_NULL, blank=False, null=True) + + +class NullableUniqueCharFieldModel(models.Model): + codename = models.CharField(max_length=50, blank=True, null=True, unique=True) + email = models.EmailField(blank=True, null=True) + slug = models.SlugField(blank=True, null=True) + url = models.URLField(blank=True, null=True) + + +class Number(models.Model): + value = models.IntegerField() + + +class NumbersToDice(models.Model): + number = models.ForeignKey("Number", on_delete=models.CASCADE) + die = models.ForeignKey("Dice", on_delete=models.CASCADE) + + +class Dice(models.Model): + numbers = models.ManyToManyField( + Number, + through=NumbersToDice, + limit_choices_to=models.Q(value__gte=1), + ) diff --git a/testbed/django__django/tests/model_forms/test_uuid.py b/testbed/django__django/tests/model_forms/test_uuid.py new file 
mode 100644 index 0000000000000000000000000000000000000000..583b3fea9487c2800d914fe7fc0b0221f7b392d2 --- /dev/null +++ b/testbed/django__django/tests/model_forms/test_uuid.py @@ -0,0 +1,35 @@ +from django import forms +from django.core.exceptions import ValidationError +from django.test import TestCase + +from .models import UUIDPK + + +class UUIDPKForm(forms.ModelForm): + class Meta: + model = UUIDPK + fields = "__all__" + + +class ModelFormBaseTest(TestCase): + def test_create_save_error(self): + form = UUIDPKForm({}) + self.assertFalse(form.is_valid()) + msg = "The UUIDPK could not be created because the data didn't validate." + with self.assertRaisesMessage(ValueError, msg): + form.save() + + def test_update_save_error(self): + obj = UUIDPK.objects.create(name="foo") + form = UUIDPKForm({}, instance=obj) + self.assertFalse(form.is_valid()) + msg = "The UUIDPK could not be changed because the data didn't validate." + with self.assertRaisesMessage(ValueError, msg): + form.save() + + def test_model_multiple_choice_field_uuid_pk(self): + f = forms.ModelMultipleChoiceField(UUIDPK.objects.all()) + with self.assertRaisesMessage( + ValidationError, "“invalid_uuid” is not a valid UUID." 
+ ): + f.clean(["invalid_uuid"]) diff --git a/testbed/django__django/tests/model_forms/tests.py b/testbed/django__django/tests/model_forms/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..d1716ce2017c4b9184492acd25727b04fffa9724 --- /dev/null +++ b/testbed/django__django/tests/model_forms/tests.py @@ -0,0 +1,3688 @@ +import datetime +import os +from decimal import Decimal +from unittest import mock, skipUnless + +from django import forms +from django.core.exceptions import ( + NON_FIELD_ERRORS, + FieldError, + ImproperlyConfigured, + ValidationError, +) +from django.core.files.uploadedfile import SimpleUploadedFile +from django.db import connection, models +from django.db.models.query import EmptyQuerySet +from django.forms.models import ( + ModelFormMetaclass, + construct_instance, + fields_for_model, + model_to_dict, + modelform_factory, +) +from django.template import Context, Template +from django.test import SimpleTestCase, TestCase, ignore_warnings, skipUnlessDBFeature +from django.test.utils import isolate_apps +from django.utils.deprecation import RemovedInDjango60Warning + +from .models import ( + Article, + ArticleStatus, + Author, + Author1, + Award, + BetterWriter, + BigInt, + Book, + Category, + Character, + Colour, + ColourfulItem, + CustomErrorMessage, + CustomFF, + CustomFieldForExclusionModel, + DateTimePost, + DerivedBook, + DerivedPost, + Dice, + Document, + ExplicitPK, + FilePathModel, + FlexibleDatePost, + Homepage, + ImprovedArticle, + ImprovedArticleWithParentLink, + Inventory, + NullableUniqueCharFieldModel, + Number, + Person, + Photo, + Post, + Price, + Product, + Publication, + PublicationDefaults, + StrictAssignmentAll, + StrictAssignmentFieldSpecific, + Student, + StumpJoke, + TextFile, + Triple, + Writer, + WriterProfile, + test_images, +) + +if test_images: + from .models import ImageFile, NoExtensionImageFile, OptionalImageFile + + class ImageFileForm(forms.ModelForm): + class Meta: + model = ImageFile + 
fields = "__all__" + + class OptionalImageFileForm(forms.ModelForm): + class Meta: + model = OptionalImageFile + fields = "__all__" + + class NoExtensionImageFileForm(forms.ModelForm): + class Meta: + model = NoExtensionImageFile + fields = "__all__" + + +class ProductForm(forms.ModelForm): + class Meta: + model = Product + fields = "__all__" + + +class PriceForm(forms.ModelForm): + class Meta: + model = Price + fields = "__all__" + + +class BookForm(forms.ModelForm): + class Meta: + model = Book + fields = "__all__" + + +class DerivedBookForm(forms.ModelForm): + class Meta: + model = DerivedBook + fields = "__all__" + + +class ExplicitPKForm(forms.ModelForm): + class Meta: + model = ExplicitPK + fields = ( + "key", + "desc", + ) + + +class PostForm(forms.ModelForm): + class Meta: + model = Post + fields = "__all__" + + +class DerivedPostForm(forms.ModelForm): + class Meta: + model = DerivedPost + fields = "__all__" + + +class CustomWriterForm(forms.ModelForm): + name = forms.CharField(required=False) + + class Meta: + model = Writer + fields = "__all__" + + +class BaseCategoryForm(forms.ModelForm): + class Meta: + model = Category + fields = "__all__" + + +class ArticleForm(forms.ModelForm): + class Meta: + model = Article + fields = "__all__" + + +class RoykoForm(forms.ModelForm): + class Meta: + model = Writer + fields = "__all__" + + +class ArticleStatusForm(forms.ModelForm): + class Meta: + model = ArticleStatus + fields = "__all__" + + +class InventoryForm(forms.ModelForm): + class Meta: + model = Inventory + fields = "__all__" + + +class SelectInventoryForm(forms.Form): + items = forms.ModelMultipleChoiceField( + Inventory.objects.all(), to_field_name="barcode" + ) + + +class CustomFieldForExclusionForm(forms.ModelForm): + class Meta: + model = CustomFieldForExclusionModel + fields = ["name", "markup"] + + +class TextFileForm(forms.ModelForm): + class Meta: + model = TextFile + fields = "__all__" + + +class BigIntForm(forms.ModelForm): + class Meta: + model 
= BigInt + fields = "__all__" + + +class ModelFormWithMedia(forms.ModelForm): + class Media: + js = ("/some/form/javascript",) + css = {"all": ("/some/form/css",)} + + class Meta: + model = TextFile + fields = "__all__" + + +class CustomErrorMessageForm(forms.ModelForm): + name1 = forms.CharField(error_messages={"invalid": "Form custom error message."}) + + class Meta: + fields = "__all__" + model = CustomErrorMessage + + +class ModelFormBaseTest(TestCase): + def test_base_form(self): + self.assertEqual(list(BaseCategoryForm.base_fields), ["name", "slug", "url"]) + + def test_no_model_class(self): + class NoModelModelForm(forms.ModelForm): + pass + + with self.assertRaisesMessage( + ValueError, "ModelForm has no model class specified." + ): + NoModelModelForm() + + def test_empty_fields_to_fields_for_model(self): + """ + An argument of fields=() to fields_for_model should return an empty dictionary + """ + field_dict = fields_for_model(Person, fields=()) + self.assertEqual(len(field_dict), 0) + + def test_fields_for_model_form_fields(self): + form_declared_fields = CustomWriterForm.declared_fields + field_dict = fields_for_model( + Writer, + fields=["name"], + form_declared_fields=form_declared_fields, + ) + self.assertIs(field_dict["name"], form_declared_fields["name"]) + + def test_empty_fields_on_modelform(self): + """ + No fields on a ModelForm should actually result in no fields. + """ + + class EmptyPersonForm(forms.ModelForm): + class Meta: + model = Person + fields = () + + form = EmptyPersonForm() + self.assertEqual(len(form.fields), 0) + + def test_empty_fields_to_construct_instance(self): + """ + No fields should be set on a model instance if construct_instance + receives fields=(). 
+ """ + form = modelform_factory(Person, fields="__all__")({"name": "John Doe"}) + self.assertTrue(form.is_valid()) + instance = construct_instance(form, Person(), fields=()) + self.assertEqual(instance.name, "") + + def test_blank_with_null_foreign_key_field(self): + """ + #13776 -- ModelForm's with models having a FK set to null=False and + required=False should be valid. + """ + + class FormForTestingIsValid(forms.ModelForm): + class Meta: + model = Student + fields = "__all__" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields["character"].required = False + + char = Character.objects.create( + username="user", last_action=datetime.datetime.today() + ) + data = {"study": "Engineering"} + data2 = {"study": "Engineering", "character": char.pk} + + # form is valid because required=False for field 'character' + f1 = FormForTestingIsValid(data) + self.assertTrue(f1.is_valid()) + + f2 = FormForTestingIsValid(data2) + self.assertTrue(f2.is_valid()) + obj = f2.save() + self.assertEqual(obj.character, char) + + def test_blank_false_with_null_true_foreign_key_field(self): + """ + A ModelForm with a model having ForeignKey(blank=False, null=True) + and the form field set to required=False should allow the field to be + unset. 
+ """ + + class AwardForm(forms.ModelForm): + class Meta: + model = Award + fields = "__all__" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields["character"].required = False + + character = Character.objects.create( + username="user", last_action=datetime.datetime.today() + ) + award = Award.objects.create(name="Best sprinter", character=character) + data = {"name": "Best tester", "character": ""} # remove character + form = AwardForm(data=data, instance=award) + self.assertTrue(form.is_valid()) + award = form.save() + self.assertIsNone(award.character) + + def test_blank_foreign_key_with_radio(self): + class BookForm(forms.ModelForm): + class Meta: + model = Book + fields = ["author"] + widgets = {"author": forms.RadioSelect()} + + writer = Writer.objects.create(name="Joe Doe") + form = BookForm() + self.assertEqual( + list(form.fields["author"].choices), + [ + ("", "---------"), + (writer.pk, "Joe Doe"), + ], + ) + + def test_non_blank_foreign_key_with_radio(self): + class AwardForm(forms.ModelForm): + class Meta: + model = Award + fields = ["character"] + widgets = {"character": forms.RadioSelect()} + + character = Character.objects.create( + username="user", + last_action=datetime.datetime.today(), + ) + form = AwardForm() + self.assertEqual( + list(form.fields["character"].choices), + [(character.pk, "user")], + ) + + def test_save_blank_false_with_required_false(self): + """ + A ModelForm with a model with a field set to blank=False and the form + field set to required=False should allow the field to be unset. 
+ """ + obj = Writer.objects.create(name="test") + form = CustomWriterForm(data={"name": ""}, instance=obj) + self.assertTrue(form.is_valid()) + obj = form.save() + self.assertEqual(obj.name, "") + + @ignore_warnings(category=RemovedInDjango60Warning) + def test_save_blank_null_unique_charfield_saves_null(self): + form_class = modelform_factory( + model=NullableUniqueCharFieldModel, fields="__all__" + ) + empty_value = ( + "" if connection.features.interprets_empty_strings_as_nulls else None + ) + data = { + "codename": "", + "email": "", + "slug": "", + "url": "", + } + form = form_class(data=data) + self.assertTrue(form.is_valid()) + form.save() + self.assertEqual(form.instance.codename, empty_value) + self.assertEqual(form.instance.email, empty_value) + self.assertEqual(form.instance.slug, empty_value) + self.assertEqual(form.instance.url, empty_value) + + # Save a second form to verify there isn't a unique constraint violation. + form = form_class(data=data) + self.assertTrue(form.is_valid()) + form.save() + self.assertEqual(form.instance.codename, empty_value) + self.assertEqual(form.instance.email, empty_value) + self.assertEqual(form.instance.slug, empty_value) + self.assertEqual(form.instance.url, empty_value) + + def test_missing_fields_attribute(self): + message = ( + "Creating a ModelForm without either the 'fields' attribute " + "or the 'exclude' attribute is prohibited; form " + "MissingFieldsForm needs updating." 
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, message): + + class MissingFieldsForm(forms.ModelForm): + class Meta: + model = Category + + def test_extra_fields(self): + class ExtraFields(BaseCategoryForm): + some_extra_field = forms.BooleanField() + + self.assertEqual( + list(ExtraFields.base_fields), ["name", "slug", "url", "some_extra_field"] + ) + + def test_extra_field_model_form(self): + with self.assertRaisesMessage(FieldError, "no-field"): + + class ExtraPersonForm(forms.ModelForm): + """ModelForm with an extra field""" + + age = forms.IntegerField() + + class Meta: + model = Person + fields = ("name", "no-field") + + def test_extra_declared_field_model_form(self): + class ExtraPersonForm(forms.ModelForm): + """ModelForm with an extra field""" + + age = forms.IntegerField() + + class Meta: + model = Person + fields = ("name", "age") + + def test_extra_field_modelform_factory(self): + with self.assertRaisesMessage( + FieldError, "Unknown field(s) (no-field) specified for Person" + ): + modelform_factory(Person, fields=["no-field", "name"]) + + def test_replace_field(self): + class ReplaceField(forms.ModelForm): + url = forms.BooleanField() + + class Meta: + model = Category + fields = "__all__" + + self.assertIsInstance( + ReplaceField.base_fields["url"], forms.fields.BooleanField + ) + + def test_replace_field_variant_2(self): + # Should have the same result as before, + # but 'fields' attribute specified differently + class ReplaceField(forms.ModelForm): + url = forms.BooleanField() + + class Meta: + model = Category + fields = ["url"] + + self.assertIsInstance( + ReplaceField.base_fields["url"], forms.fields.BooleanField + ) + + def test_replace_field_variant_3(self): + # Should have the same result as before, + # but 'fields' attribute specified differently + class ReplaceField(forms.ModelForm): + url = forms.BooleanField() + + class Meta: + model = Category + fields = [] # url will still appear, since it is explicit above + + 
self.assertIsInstance( + ReplaceField.base_fields["url"], forms.fields.BooleanField + ) + + def test_override_field(self): + class WriterForm(forms.ModelForm): + book = forms.CharField(required=False) + + class Meta: + model = Writer + fields = "__all__" + + wf = WriterForm({"name": "Richard Lockridge"}) + self.assertTrue(wf.is_valid()) + + def test_limit_nonexistent_field(self): + expected_msg = "Unknown field(s) (nonexistent) specified for Category" + with self.assertRaisesMessage(FieldError, expected_msg): + + class InvalidCategoryForm(forms.ModelForm): + class Meta: + model = Category + fields = ["nonexistent"] + + def test_limit_fields_with_string(self): + msg = ( + "CategoryForm.Meta.fields cannot be a string. Did you mean to type: " + "('url',)?" + ) + with self.assertRaisesMessage(TypeError, msg): + + class CategoryForm(forms.ModelForm): + class Meta: + model = Category + fields = "url" # note the missing comma + + def test_exclude_fields(self): + class ExcludeFields(forms.ModelForm): + class Meta: + model = Category + exclude = ["url"] + + self.assertEqual(list(ExcludeFields.base_fields), ["name", "slug"]) + + def test_exclude_nonexistent_field(self): + class ExcludeFields(forms.ModelForm): + class Meta: + model = Category + exclude = ["nonexistent"] + + self.assertEqual(list(ExcludeFields.base_fields), ["name", "slug", "url"]) + + def test_exclude_fields_with_string(self): + msg = ( + "CategoryForm.Meta.exclude cannot be a string. Did you mean to type: " + "('url',)?" + ) + with self.assertRaisesMessage(TypeError, msg): + + class CategoryForm(forms.ModelForm): + class Meta: + model = Category + exclude = "url" # note the missing comma + + def test_exclude_and_validation(self): + # This Price instance generated by this form is not valid because the quantity + # field is required, but the form is valid because the field is excluded from + # the form. This is for backwards compatibility. 
+ class PriceFormWithoutQuantity(forms.ModelForm): + class Meta: + model = Price + exclude = ("quantity",) + + form = PriceFormWithoutQuantity({"price": "6.00"}) + self.assertTrue(form.is_valid()) + price = form.save(commit=False) + msg = "{'quantity': ['This field cannot be null.']}" + with self.assertRaisesMessage(ValidationError, msg): + price.full_clean() + + # The form should not validate fields that it doesn't contain even if they are + # specified using 'fields', not 'exclude'. + class PriceFormWithoutQuantity(forms.ModelForm): + class Meta: + model = Price + fields = ("price",) + + form = PriceFormWithoutQuantity({"price": "6.00"}) + self.assertTrue(form.is_valid()) + + # The form should still have an instance of a model that is not complete and + # not saved into a DB yet. + self.assertEqual(form.instance.price, Decimal("6.00")) + self.assertIsNone(form.instance.quantity) + self.assertIsNone(form.instance.pk) + + def test_confused_form(self): + class ConfusedForm(forms.ModelForm): + """Using 'fields' *and* 'exclude'. Not sure why you'd want to do + this, but uh, "be liberal in what you accept" and all. + """ + + class Meta: + model = Category + fields = ["name", "url"] + exclude = ["url"] + + self.assertEqual(list(ConfusedForm.base_fields), ["name"]) + + def test_mixmodel_form(self): + class MixModelForm(BaseCategoryForm): + """Don't allow more than one 'model' definition in the + inheritance hierarchy. Technically, it would generate a valid + form, but the fact that the resulting save method won't deal with + multiple objects is likely to trip up people not familiar with the + mechanics. + """ + + class Meta: + model = Article + fields = "__all__" + + # MixModelForm is now an Article-related thing, because MixModelForm.Meta + # overrides BaseCategoryForm.Meta. 
+ + self.assertEqual( + list(MixModelForm.base_fields), + [ + "headline", + "slug", + "pub_date", + "writer", + "article", + "categories", + "status", + ], + ) + + def test_article_form(self): + self.assertEqual( + list(ArticleForm.base_fields), + [ + "headline", + "slug", + "pub_date", + "writer", + "article", + "categories", + "status", + ], + ) + + def test_bad_form(self): + # First class with a Meta class wins... + class BadForm(ArticleForm, BaseCategoryForm): + pass + + self.assertEqual( + list(BadForm.base_fields), + [ + "headline", + "slug", + "pub_date", + "writer", + "article", + "categories", + "status", + ], + ) + + def test_invalid_meta_model(self): + class InvalidModelForm(forms.ModelForm): + class Meta: + pass # no model + + # Can't create new form + msg = "ModelForm has no model class specified." + with self.assertRaisesMessage(ValueError, msg): + InvalidModelForm() + + # Even if you provide a model instance + with self.assertRaisesMessage(ValueError, msg): + InvalidModelForm(instance=Category) + + def test_subcategory_form(self): + class SubCategoryForm(BaseCategoryForm): + """Subclassing without specifying a Meta on the class will use + the parent's Meta (or the first parent in the MRO if there are + multiple parent classes). + """ + + pass + + self.assertEqual(list(SubCategoryForm.base_fields), ["name", "slug", "url"]) + + def test_subclassmeta_form(self): + class SomeCategoryForm(forms.ModelForm): + checkbox = forms.BooleanField() + + class Meta: + model = Category + fields = "__all__" + + class SubclassMeta(SomeCategoryForm): + """We can also subclass the Meta inner class to change the fields + list. + """ + + class Meta(SomeCategoryForm.Meta): + exclude = ["url"] + + self.assertHTMLEqual( + str(SubclassMeta()), + '
' + '' + '
' + '' + '
', + ) + + def test_orderfields_form(self): + class OrderFields(forms.ModelForm): + class Meta: + model = Category + fields = ["url", "name"] + + self.assertEqual(list(OrderFields.base_fields), ["url", "name"]) + self.assertHTMLEqual( + str(OrderFields()), + '
' + '' + '
', + ) + + def test_orderfields2_form(self): + class OrderFields2(forms.ModelForm): + class Meta: + model = Category + fields = ["slug", "url", "name"] + exclude = ["url"] + + self.assertEqual(list(OrderFields2.base_fields), ["slug", "name"]) + + def test_default_populated_on_optional_field(self): + class PubForm(forms.ModelForm): + mode = forms.CharField(max_length=255, required=False) + + class Meta: + model = PublicationDefaults + fields = ("mode",) + + # Empty data uses the model field default. + mf1 = PubForm({}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertEqual(m1.mode, "di") + self.assertEqual(m1._meta.get_field("mode").get_default(), "di") + + # Blank data doesn't use the model field default. + mf2 = PubForm({"mode": ""}) + self.assertEqual(mf2.errors, {}) + m2 = mf2.save(commit=False) + self.assertEqual(m2.mode, "") + + def test_default_not_populated_on_non_empty_value_in_cleaned_data(self): + class PubForm(forms.ModelForm): + mode = forms.CharField(max_length=255, required=False) + mocked_mode = None + + def clean(self): + self.cleaned_data["mode"] = self.mocked_mode + return self.cleaned_data + + class Meta: + model = PublicationDefaults + fields = ("mode",) + + pub_form = PubForm({}) + pub_form.mocked_mode = "de" + pub = pub_form.save(commit=False) + self.assertEqual(pub.mode, "de") + # Default should be populated on an empty value in cleaned_data. + default_mode = "di" + for empty_value in pub_form.fields["mode"].empty_values: + with self.subTest(empty_value=empty_value): + pub_form = PubForm({}) + pub_form.mocked_mode = empty_value + pub = pub_form.save(commit=False) + self.assertEqual(pub.mode, default_mode) + + def test_default_not_populated_on_optional_checkbox_input(self): + class PubForm(forms.ModelForm): + class Meta: + model = PublicationDefaults + fields = ("active",) + + # Empty data doesn't use the model default because CheckboxInput + # doesn't have a value in HTML form submission. 
+ mf1 = PubForm({}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertIs(m1.active, False) + self.assertIsInstance(mf1.fields["active"].widget, forms.CheckboxInput) + self.assertIs(m1._meta.get_field("active").get_default(), True) + + def test_default_not_populated_on_checkboxselectmultiple(self): + class PubForm(forms.ModelForm): + mode = forms.CharField(required=False, widget=forms.CheckboxSelectMultiple) + + class Meta: + model = PublicationDefaults + fields = ("mode",) + + # Empty data doesn't use the model default because an unchecked + # CheckboxSelectMultiple doesn't have a value in HTML form submission. + mf1 = PubForm({}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertEqual(m1.mode, "") + self.assertEqual(m1._meta.get_field("mode").get_default(), "di") + + def test_default_not_populated_on_selectmultiple(self): + class PubForm(forms.ModelForm): + mode = forms.CharField(required=False, widget=forms.SelectMultiple) + + class Meta: + model = PublicationDefaults + fields = ("mode",) + + # Empty data doesn't use the model default because an unselected + # SelectMultiple doesn't have a value in HTML form submission. 
+ mf1 = PubForm({}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertEqual(m1.mode, "") + self.assertEqual(m1._meta.get_field("mode").get_default(), "di") + + def test_prefixed_form_with_default_field(self): + class PubForm(forms.ModelForm): + prefix = "form-prefix" + + class Meta: + model = PublicationDefaults + fields = ("mode",) + + mode = "de" + self.assertNotEqual( + mode, PublicationDefaults._meta.get_field("mode").get_default() + ) + + mf1 = PubForm({"form-prefix-mode": mode}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertEqual(m1.mode, mode) + + def test_renderer_kwarg(self): + custom = object() + self.assertIs(ProductForm(renderer=custom).renderer, custom) + + def test_default_splitdatetime_field(self): + class PubForm(forms.ModelForm): + datetime_published = forms.SplitDateTimeField(required=False) + + class Meta: + model = PublicationDefaults + fields = ("datetime_published",) + + mf1 = PubForm({}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertEqual(m1.datetime_published, datetime.datetime(2000, 1, 1)) + + mf2 = PubForm( + {"datetime_published_0": "2010-01-01", "datetime_published_1": "0:00:00"} + ) + self.assertEqual(mf2.errors, {}) + m2 = mf2.save(commit=False) + self.assertEqual(m2.datetime_published, datetime.datetime(2010, 1, 1)) + + def test_default_filefield(self): + class PubForm(forms.ModelForm): + class Meta: + model = PublicationDefaults + fields = ("file",) + + mf1 = PubForm({}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertEqual(m1.file.name, "default.txt") + + mf2 = PubForm({}, {"file": SimpleUploadedFile("name", b"foo")}) + self.assertEqual(mf2.errors, {}) + m2 = mf2.save(commit=False) + self.assertEqual(m2.file.name, "name") + + def test_default_selectdatewidget(self): + class PubForm(forms.ModelForm): + date_published = forms.DateField( + required=False, widget=forms.SelectDateWidget + ) + + class Meta: + model = 
PublicationDefaults + fields = ("date_published",) + + mf1 = PubForm({}) + self.assertEqual(mf1.errors, {}) + m1 = mf1.save(commit=False) + self.assertEqual(m1.date_published, datetime.date.today()) + + mf2 = PubForm( + { + "date_published_year": "2010", + "date_published_month": "1", + "date_published_day": "1", + } + ) + self.assertEqual(mf2.errors, {}) + m2 = mf2.save(commit=False) + self.assertEqual(m2.date_published, datetime.date(2010, 1, 1)) + + +# RemovedInDjango60Warning. +# It's a temporary workaround for the deprecation period. +class HttpsURLField(forms.URLField): + def __init__(self, **kwargs): + super().__init__(assume_scheme="https", **kwargs) + + +class FieldOverridesByFormMetaForm(forms.ModelForm): + class Meta: + model = Category + fields = ["name", "url", "slug"] + widgets = { + "name": forms.Textarea, + "url": forms.TextInput(attrs={"class": "url"}), + } + labels = { + "name": "Title", + } + help_texts = { + "slug": "Watch out! Letters, numbers, underscores and hyphens only.", + } + error_messages = { + "slug": { + "invalid": ( + "Didn't you read the help text? " + "We said letters, numbers, underscores and hyphens only!" 
+ ) + } + } + field_classes = { + "url": HttpsURLField, + } + + +class TestFieldOverridesByFormMeta(SimpleTestCase): + def test_widget_overrides(self): + form = FieldOverridesByFormMetaForm() + self.assertHTMLEqual( + str(form["name"]), + '", + ) + self.assertHTMLEqual( + str(form["url"]), + '", + ) + self.assertHTMLEqual( + str(form["slug"]), + '', + ) + + def test_label_overrides(self): + form = FieldOverridesByFormMetaForm() + self.assertHTMLEqual( + str(form["name"].label_tag()), + '', + ) + self.assertHTMLEqual( + str(form["url"].label_tag()), + '', + ) + self.assertHTMLEqual( + str(form["slug"].label_tag()), + '', + ) + self.assertHTMLEqual( + form["name"].legend_tag(), + 'Title:', + ) + self.assertHTMLEqual( + form["url"].legend_tag(), + 'The URL:', + ) + self.assertHTMLEqual( + form["slug"].legend_tag(), + 'Slug:', + ) + + def test_help_text_overrides(self): + form = FieldOverridesByFormMetaForm() + self.assertEqual( + form["slug"].help_text, + "Watch out! Letters, numbers, underscores and hyphens only.", + ) + + def test_error_messages_overrides(self): + form = FieldOverridesByFormMetaForm( + data={ + "name": "Category", + "url": "http://www.example.com/category/", + "slug": "!%#*@", + } + ) + form.full_clean() + + error = [ + "Didn't you read the help text? " + "We said letters, numbers, underscores and hyphens only!", + ] + self.assertEqual(form.errors, {"slug": error}) + + def test_field_type_overrides(self): + form = FieldOverridesByFormMetaForm() + self.assertIs(Category._meta.get_field("url").__class__, models.CharField) + self.assertIsInstance(form.fields["url"], forms.URLField) + + +class IncompleteCategoryFormWithFields(forms.ModelForm): + """ + A form that replaces the model's url field with a custom one. This should + prevent the model field's validation from being called. 
+ """ + + url = forms.CharField(required=False) + + class Meta: + fields = ("name", "slug") + model = Category + + +class IncompleteCategoryFormWithExclude(forms.ModelForm): + """ + A form that replaces the model's url field with a custom one. This should + prevent the model field's validation from being called. + """ + + url = forms.CharField(required=False) + + class Meta: + exclude = ["url"] + model = Category + + +class ValidationTest(SimpleTestCase): + def test_validates_with_replaced_field_not_specified(self): + form = IncompleteCategoryFormWithFields( + data={"name": "some name", "slug": "some-slug"} + ) + self.assertIs(form.is_valid(), True) + + def test_validates_with_replaced_field_excluded(self): + form = IncompleteCategoryFormWithExclude( + data={"name": "some name", "slug": "some-slug"} + ) + self.assertIs(form.is_valid(), True) + + def test_notrequired_overrides_notblank(self): + form = CustomWriterForm({}) + self.assertIs(form.is_valid(), True) + + +class UniqueTest(TestCase): + """ + unique/unique_together validation. 
+ """ + + @classmethod + def setUpTestData(cls): + cls.writer = Writer.objects.create(name="Mike Royko") + + def test_simple_unique(self): + form = ProductForm({"slug": "teddy-bear-blue"}) + self.assertTrue(form.is_valid()) + obj = form.save() + form = ProductForm({"slug": "teddy-bear-blue"}) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["slug"], ["Product with this Slug already exists."] + ) + form = ProductForm({"slug": "teddy-bear-blue"}, instance=obj) + self.assertTrue(form.is_valid()) + + def test_unique_together(self): + """ModelForm test of unique_together constraint""" + form = PriceForm({"price": "6.00", "quantity": "1"}) + self.assertTrue(form.is_valid()) + form.save() + form = PriceForm({"price": "6.00", "quantity": "1"}) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["__all__"], + ["Price with this Price and Quantity already exists."], + ) + + def test_unique_together_exclusion(self): + """ + Forms don't validate unique_together constraints when only part of the + constraint is included in the form's fields. This allows using + form.save(commit=False) and then assigning the missing field(s) to the + model instance. + """ + + class BookForm(forms.ModelForm): + class Meta: + model = DerivedBook + fields = ("isbn", "suffix1") + + # The unique_together is on suffix1/suffix2 but only suffix1 is part + # of the form. The fields must have defaults, otherwise they'll be + # skipped by other logic. + self.assertEqual(DerivedBook._meta.unique_together, (("suffix1", "suffix2"),)) + for name in ("suffix1", "suffix2"): + with self.subTest(name=name): + field = DerivedBook._meta.get_field(name) + self.assertEqual(field.default, 0) + + # The form fails validation with "Derived book with this Suffix1 and + # Suffix2 already exists." if the unique_together validation isn't + # skipped. 
+ DerivedBook.objects.create(isbn="12345") + form = BookForm({"isbn": "56789", "suffix1": "0"}) + self.assertTrue(form.is_valid(), form.errors) + + def test_multiple_field_unique_together(self): + """ + When the same field is involved in multiple unique_together + constraints, we need to make sure we don't remove the data for it + before doing all the validation checking (not just failing after + the first one). + """ + + class TripleForm(forms.ModelForm): + class Meta: + model = Triple + fields = "__all__" + + Triple.objects.create(left=1, middle=2, right=3) + + form = TripleForm({"left": "1", "middle": "2", "right": "3"}) + self.assertFalse(form.is_valid()) + + form = TripleForm({"left": "1", "middle": "3", "right": "1"}) + self.assertTrue(form.is_valid()) + + @skipUnlessDBFeature("supports_nullable_unique_constraints") + def test_unique_null(self): + title = "I May Be Wrong But I Doubt It" + form = BookForm({"title": title, "author": self.writer.pk}) + self.assertTrue(form.is_valid()) + form.save() + form = BookForm({"title": title, "author": self.writer.pk}) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["__all__"], ["Book with this Title and Author already exists."] + ) + form = BookForm({"title": title}) + self.assertTrue(form.is_valid()) + form.save() + form = BookForm({"title": title}) + self.assertTrue(form.is_valid()) + + def test_inherited_unique(self): + title = "Boss" + Book.objects.create(title=title, author=self.writer, special_id=1) + form = DerivedBookForm( + { + "title": "Other", + "author": self.writer.pk, + "special_id": "1", + "isbn": "12345", + } + ) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["special_id"], ["Book with this Special id already exists."] + ) + + def test_inherited_unique_together(self): + title = "Boss" + form = BookForm({"title": title, "author": self.writer.pk}) + self.assertTrue(form.is_valid()) + 
form.save() + form = DerivedBookForm( + {"title": title, "author": self.writer.pk, "isbn": "12345"} + ) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["__all__"], ["Book with this Title and Author already exists."] + ) + + def test_abstract_inherited_unique(self): + title = "Boss" + isbn = "12345" + DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn) + form = DerivedBookForm( + { + "title": "Other", + "author": self.writer.pk, + "isbn": isbn, + "suffix1": "1", + "suffix2": "2", + } + ) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["isbn"], ["Derived book with this Isbn already exists."] + ) + + def test_abstract_inherited_unique_together(self): + title = "Boss" + isbn = "12345" + DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn) + form = DerivedBookForm( + { + "title": "Other", + "author": self.writer.pk, + "isbn": "9876", + "suffix1": "0", + "suffix2": "0", + } + ) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["__all__"], + ["Derived book with this Suffix1 and Suffix2 already exists."], + ) + + def test_explicitpk_unspecified(self): + """Test for primary_key being in the form and failing validation.""" + form = ExplicitPKForm({"key": "", "desc": ""}) + self.assertFalse(form.is_valid()) + + def test_explicitpk_unique(self): + """Ensure keys and blank character strings are tested for uniqueness.""" + form = ExplicitPKForm({"key": "key1", "desc": ""}) + self.assertTrue(form.is_valid()) + form.save() + form = ExplicitPKForm({"key": "key1", "desc": ""}) + self.assertFalse(form.is_valid()) + if connection.features.interprets_empty_strings_as_nulls: + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["key"], ["Explicit pk with this Key already exists."] + ) + else: + self.assertEqual(len(form.errors), 3) + 
self.assertEqual( + form.errors["__all__"], + ["Explicit pk with this Key and Desc already exists."], + ) + self.assertEqual( + form.errors["desc"], ["Explicit pk with this Desc already exists."] + ) + self.assertEqual( + form.errors["key"], ["Explicit pk with this Key already exists."] + ) + + def test_unique_for_date(self): + p = Post.objects.create( + title="Django 1.0 is released", + slug="Django 1.0", + subtitle="Finally", + posted=datetime.date(2008, 9, 3), + ) + form = PostForm({"title": "Django 1.0 is released", "posted": "2008-09-03"}) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["title"], ["Title must be unique for Posted date."] + ) + form = PostForm({"title": "Work on Django 1.1 begins", "posted": "2008-09-03"}) + self.assertTrue(form.is_valid()) + form = PostForm({"title": "Django 1.0 is released", "posted": "2008-09-04"}) + self.assertTrue(form.is_valid()) + form = PostForm({"slug": "Django 1.0", "posted": "2008-01-01"}) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual(form.errors["slug"], ["Slug must be unique for Posted year."]) + form = PostForm({"subtitle": "Finally", "posted": "2008-09-30"}) + self.assertFalse(form.is_valid()) + self.assertEqual( + form.errors["subtitle"], ["Subtitle must be unique for Posted month."] + ) + data = { + "subtitle": "Finally", + "title": "Django 1.0 is released", + "slug": "Django 1.0", + "posted": "2008-09-03", + } + form = PostForm(data, instance=p) + self.assertTrue(form.is_valid()) + form = PostForm({"title": "Django 1.0 is released"}) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual(form.errors["posted"], ["This field is required."]) + + def test_unique_for_date_in_exclude(self): + """ + If the date for unique_for_* constraints is excluded from the + ModelForm (in this case 'posted' has editable=False, then the + constraint should be ignored. 
+ """ + + class DateTimePostForm(forms.ModelForm): + class Meta: + model = DateTimePost + fields = "__all__" + + DateTimePost.objects.create( + title="Django 1.0 is released", + slug="Django 1.0", + subtitle="Finally", + posted=datetime.datetime(2008, 9, 3, 10, 10, 1), + ) + # 'title' has unique_for_date='posted' + form = DateTimePostForm( + {"title": "Django 1.0 is released", "posted": "2008-09-03"} + ) + self.assertTrue(form.is_valid()) + # 'slug' has unique_for_year='posted' + form = DateTimePostForm({"slug": "Django 1.0", "posted": "2008-01-01"}) + self.assertTrue(form.is_valid()) + # 'subtitle' has unique_for_month='posted' + form = DateTimePostForm({"subtitle": "Finally", "posted": "2008-09-30"}) + self.assertTrue(form.is_valid()) + + def test_inherited_unique_for_date(self): + p = Post.objects.create( + title="Django 1.0 is released", + slug="Django 1.0", + subtitle="Finally", + posted=datetime.date(2008, 9, 3), + ) + form = DerivedPostForm( + {"title": "Django 1.0 is released", "posted": "2008-09-03"} + ) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["title"], ["Title must be unique for Posted date."] + ) + form = DerivedPostForm( + {"title": "Work on Django 1.1 begins", "posted": "2008-09-03"} + ) + self.assertTrue(form.is_valid()) + form = DerivedPostForm( + {"title": "Django 1.0 is released", "posted": "2008-09-04"} + ) + self.assertTrue(form.is_valid()) + form = DerivedPostForm({"slug": "Django 1.0", "posted": "2008-01-01"}) + self.assertFalse(form.is_valid()) + self.assertEqual(len(form.errors), 1) + self.assertEqual(form.errors["slug"], ["Slug must be unique for Posted year."]) + form = DerivedPostForm({"subtitle": "Finally", "posted": "2008-09-30"}) + self.assertFalse(form.is_valid()) + self.assertEqual( + form.errors["subtitle"], ["Subtitle must be unique for Posted month."] + ) + data = { + "subtitle": "Finally", + "title": "Django 1.0 is released", + "slug": "Django 1.0", + "posted": 
"2008-09-03", + } + form = DerivedPostForm(data, instance=p) + self.assertTrue(form.is_valid()) + + def test_unique_for_date_with_nullable_date(self): + class FlexDatePostForm(forms.ModelForm): + class Meta: + model = FlexibleDatePost + fields = "__all__" + + p = FlexibleDatePost.objects.create( + title="Django 1.0 is released", + slug="Django 1.0", + subtitle="Finally", + posted=datetime.date(2008, 9, 3), + ) + + form = FlexDatePostForm({"title": "Django 1.0 is released"}) + self.assertTrue(form.is_valid()) + form = FlexDatePostForm({"slug": "Django 1.0"}) + self.assertTrue(form.is_valid()) + form = FlexDatePostForm({"subtitle": "Finally"}) + self.assertTrue(form.is_valid()) + data = { + "subtitle": "Finally", + "title": "Django 1.0 is released", + "slug": "Django 1.0", + } + form = FlexDatePostForm(data, instance=p) + self.assertTrue(form.is_valid()) + + def test_override_unique_message(self): + class CustomProductForm(ProductForm): + class Meta(ProductForm.Meta): + error_messages = { + "slug": { + "unique": "%(model_name)s's %(field_label)s not unique.", + } + } + + Product.objects.create(slug="teddy-bear-blue") + form = CustomProductForm({"slug": "teddy-bear-blue"}) + self.assertEqual(len(form.errors), 1) + self.assertEqual(form.errors["slug"], ["Product's Slug not unique."]) + + def test_override_unique_together_message(self): + class CustomPriceForm(PriceForm): + class Meta(PriceForm.Meta): + error_messages = { + NON_FIELD_ERRORS: { + "unique_together": ( + "%(model_name)s's %(field_labels)s not unique." 
+ ), + } + } + + Price.objects.create(price=6.00, quantity=1) + form = CustomPriceForm({"price": "6.00", "quantity": "1"}) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors[NON_FIELD_ERRORS], ["Price's Price and Quantity not unique."] + ) + + def test_override_unique_for_date_message(self): + class CustomPostForm(PostForm): + class Meta(PostForm.Meta): + error_messages = { + "title": { + "unique_for_date": ( + "%(model_name)s's %(field_label)s not unique " + "for %(date_field_label)s date." + ), + } + } + + Post.objects.create( + title="Django 1.0 is released", + slug="Django 1.0", + subtitle="Finally", + posted=datetime.date(2008, 9, 3), + ) + form = CustomPostForm( + {"title": "Django 1.0 is released", "posted": "2008-09-03"} + ) + self.assertEqual(len(form.errors), 1) + self.assertEqual( + form.errors["title"], ["Post's Title not unique for Posted date."] + ) + + +class ModelFormBasicTests(TestCase): + def create_basic_data(self): + self.c1 = Category.objects.create( + name="Entertainment", slug="entertainment", url="entertainment" + ) + self.c2 = Category.objects.create( + name="It's a test", slug="its-test", url="test" + ) + self.c3 = Category.objects.create( + name="Third test", slug="third-test", url="third" + ) + self.w_royko = Writer.objects.create(name="Mike Royko") + self.w_woodward = Writer.objects.create(name="Bob Woodward") + + def test_base_form(self): + self.assertEqual(Category.objects.count(), 0) + f = BaseCategoryForm() + self.assertHTMLEqual( + str(f), + '
' + '
', + ) + self.assertHTMLEqual( + str(f.as_ul()), + """ +
  • +
  • +
  • +
  • +
  • +
  • + """, + ) + self.assertHTMLEqual( + str(f["name"]), + """""", + ) + + def test_auto_id(self): + f = BaseCategoryForm(auto_id=False) + self.assertHTMLEqual( + str(f.as_ul()), + """
  • Name:
  • +
  • Slug:
  • +
  • The URL:
  • """, + ) + + def test_initial_values(self): + self.create_basic_data() + # Initial values can be provided for model forms + f = ArticleForm( + auto_id=False, + initial={ + "headline": "Your headline here", + "categories": [str(self.c1.id), str(self.c2.id)], + }, + ) + self.assertHTMLEqual( + f.as_ul(), + """ +
  • Headline: + +
  • +
  • Slug:
  • +
  • Pub date:
  • +
  • Writer:
  • +
  • Article: +
  • +
  • Categories:
  • +
  • Status:
  • + """ + % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk), + ) + + # When the ModelForm is passed an instance, that instance's current values are + # inserted as 'initial' data in each Field. + f = RoykoForm(auto_id=False, instance=self.w_royko) + self.assertHTMLEqual( + str(f), + '
    Name:
    Use both first and last names.
    ' + '
    ", + ) + + art = Article.objects.create( + headline="Test article", + slug="test-article", + pub_date=datetime.date(1988, 1, 4), + writer=self.w_royko, + article="Hello.", + ) + art_id_1 = art.id + + f = ArticleForm(auto_id=False, instance=art) + self.assertHTMLEqual( + f.as_ul(), + """ +
  • Headline: + +
  • +
  • Slug: + +
  • +
  • Pub date: +
  • +
  • Writer:
  • +
  • Article: +
  • +
  • Categories:
  • +
  • Status:
  • + """ + % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk), + ) + + f = ArticleForm( + { + "headline": "Test headline", + "slug": "test-headline", + "pub_date": "1984-02-06", + "writer": str(self.w_royko.pk), + "article": "Hello.", + }, + instance=art, + ) + self.assertEqual(f.errors, {}) + self.assertTrue(f.is_valid()) + test_art = f.save() + self.assertEqual(test_art.id, art_id_1) + test_art = Article.objects.get(id=art_id_1) + self.assertEqual(test_art.headline, "Test headline") + + def test_m2m_initial_callable(self): + """ + A callable can be provided as the initial value for an m2m field. + """ + self.maxDiff = 1200 + self.create_basic_data() + + # Set up a callable initial value + def formfield_for_dbfield(db_field, **kwargs): + if db_field.name == "categories": + kwargs["initial"] = lambda: Category.objects.order_by("name")[:2] + return db_field.formfield(**kwargs) + + # Create a ModelForm, instantiate it, and check that the output is as expected + ModelForm = modelform_factory( + Article, + fields=["headline", "categories"], + formfield_callback=formfield_for_dbfield, + ) + form = ModelForm() + self.assertHTMLEqual( + form.as_ul(), + """
  • +
  • +
  • +
  • """ + % (self.c1.pk, self.c2.pk, self.c3.pk), + ) + + def test_basic_creation(self): + self.assertEqual(Category.objects.count(), 0) + f = BaseCategoryForm( + { + "name": "Entertainment", + "slug": "entertainment", + "url": "entertainment", + } + ) + self.assertTrue(f.is_valid()) + self.assertEqual(f.cleaned_data["name"], "Entertainment") + self.assertEqual(f.cleaned_data["slug"], "entertainment") + self.assertEqual(f.cleaned_data["url"], "entertainment") + c1 = f.save() + # Testing whether the same object is returned from the + # ORM... not the fastest way... + + self.assertEqual(Category.objects.count(), 1) + self.assertEqual(c1, Category.objects.all()[0]) + self.assertEqual(c1.name, "Entertainment") + + def test_save_commit_false(self): + # If you call save() with commit=False, then it will return an object that + # hasn't yet been saved to the database. In this case, it's up to you to call + # save() on the resulting model instance. + f = BaseCategoryForm( + {"name": "Third test", "slug": "third-test", "url": "third"} + ) + self.assertTrue(f.is_valid()) + c1 = f.save(commit=False) + self.assertEqual(c1.name, "Third test") + self.assertEqual(Category.objects.count(), 0) + c1.save() + self.assertEqual(Category.objects.count(), 1) + + def test_save_with_data_errors(self): + # If you call save() with invalid data, you'll get a ValueError. + f = BaseCategoryForm({"name": "", "slug": "not a slug!", "url": "foo"}) + self.assertEqual(f.errors["name"], ["This field is required."]) + self.assertEqual( + f.errors["slug"], + [ + "Enter a valid “slug” consisting of letters, numbers, underscores or " + "hyphens." + ], + ) + self.assertEqual(f.cleaned_data, {"url": "foo"}) + msg = "The Category could not be created because the data didn't validate." 
+ with self.assertRaisesMessage(ValueError, msg): + f.save() + f = BaseCategoryForm({"name": "", "slug": "", "url": "foo"}) + with self.assertRaisesMessage(ValueError, msg): + f.save() + + def test_multi_fields(self): + self.create_basic_data() + self.maxDiff = None + # ManyToManyFields are represented by a MultipleChoiceField, ForeignKeys and any + # fields with the 'choices' attribute are represented by a ChoiceField. + f = ArticleForm(auto_id=False) + self.assertHTMLEqual( + str(f), + """ +
    Headline: + +
    +
    Slug: + +
    +
    Pub date: + +
    +
    Writer: + +
    +
    Article: + +
    +
    Categories: + +
    +
    Status: + +
    + """ + % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk), + ) + + # Add some categories and test the many-to-many form output. + new_art = Article.objects.create( + article="Hello.", + headline="New headline", + slug="new-headline", + pub_date=datetime.date(1988, 1, 4), + writer=self.w_royko, + ) + new_art.categories.add(Category.objects.get(name="Entertainment")) + self.assertSequenceEqual(new_art.categories.all(), [self.c1]) + f = ArticleForm(auto_id=False, instance=new_art) + self.assertHTMLEqual( + f.as_ul(), + """ +
  • Headline: + +
  • +
  • Slug: + +
  • +
  • Pub date: +
  • +
  • Writer:
  • +
  • Article: +
  • +
  • Categories:
  • +
  • Status:
  • + """ + % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk), + ) + + def test_subset_fields(self): + # You can restrict a form to a subset of the complete list of fields + # by providing a 'fields' argument. If you try to save a + # model created with such a form, you need to ensure that the fields + # that are _not_ on the form have default values, or are allowed to have + # a value of None. If a field isn't specified on a form, the object created + # from the form can't provide a value for that field! + class PartialArticleForm(forms.ModelForm): + class Meta: + model = Article + fields = ("headline", "pub_date") + + f = PartialArticleForm(auto_id=False) + self.assertHTMLEqual( + str(f), + '
    Headline:' + '
    Pub date:
    ', + ) + + class PartialArticleFormWithSlug(forms.ModelForm): + class Meta: + model = Article + fields = ("headline", "slug", "pub_date") + + w_royko = Writer.objects.create(name="Mike Royko") + art = Article.objects.create( + article="Hello.", + headline="New headline", + slug="new-headline", + pub_date=datetime.date(1988, 1, 4), + writer=w_royko, + ) + f = PartialArticleFormWithSlug( + { + "headline": "New headline", + "slug": "new-headline", + "pub_date": "1988-01-04", + }, + auto_id=False, + instance=art, + ) + self.assertHTMLEqual( + f.as_ul(), + """ +
  • Headline: + +
  • +
  • Slug: + +
  • +
  • Pub date: +
  • + """, + ) + self.assertTrue(f.is_valid()) + new_art = f.save() + self.assertEqual(new_art.id, art.id) + new_art = Article.objects.get(id=art.id) + self.assertEqual(new_art.headline, "New headline") + + def test_m2m_editing(self): + self.create_basic_data() + form_data = { + "headline": "New headline", + "slug": "new-headline", + "pub_date": "1988-01-04", + "writer": str(self.w_royko.pk), + "article": "Hello.", + "categories": [str(self.c1.id), str(self.c2.id)], + } + # Create a new article, with categories, via the form. + f = ArticleForm(form_data) + new_art = f.save() + new_art = Article.objects.get(id=new_art.id) + art_id_1 = new_art.id + self.assertSequenceEqual( + new_art.categories.order_by("name"), [self.c1, self.c2] + ) + + # Now, submit form data with no categories. This deletes the existing + # categories. + form_data["categories"] = [] + f = ArticleForm(form_data, instance=new_art) + new_art = f.save() + self.assertEqual(new_art.id, art_id_1) + new_art = Article.objects.get(id=art_id_1) + self.assertSequenceEqual(new_art.categories.all(), []) + + # Create a new article, with no categories, via the form. + f = ArticleForm(form_data) + new_art = f.save() + art_id_2 = new_art.id + self.assertNotIn(art_id_2, (None, art_id_1)) + new_art = Article.objects.get(id=art_id_2) + self.assertSequenceEqual(new_art.categories.all(), []) + + # Create a new article, with categories, via the form, but use commit=False. + # The m2m data won't be saved until save_m2m() is invoked on the form. 
+ form_data["categories"] = [str(self.c1.id), str(self.c2.id)] + f = ArticleForm(form_data) + new_art = f.save(commit=False) + + # Manually save the instance + new_art.save() + art_id_3 = new_art.id + self.assertNotIn(art_id_3, (None, art_id_1, art_id_2)) + + # The instance doesn't have m2m data yet + new_art = Article.objects.get(id=art_id_3) + self.assertSequenceEqual(new_art.categories.all(), []) + + # Save the m2m data on the form + f.save_m2m() + self.assertSequenceEqual( + new_art.categories.order_by("name"), [self.c1, self.c2] + ) + + def test_custom_form_fields(self): + # Here, we define a custom ModelForm. Because it happens to have the + # same fields as the Category model, we can just call the form's save() + # to apply its changes to an existing Category instance. + class ShortCategory(forms.ModelForm): + name = forms.CharField(max_length=5) + slug = forms.CharField(max_length=5) + url = forms.CharField(max_length=3) + + class Meta: + model = Category + fields = "__all__" + + cat = Category.objects.create(name="Third test") + form = ShortCategory( + {"name": "Third", "slug": "third", "url": "3rd"}, instance=cat + ) + self.assertEqual(form.save().name, "Third") + self.assertEqual(Category.objects.get(id=cat.id).name, "Third") + + def test_runtime_choicefield_populated(self): + self.maxDiff = None + # Here, we demonstrate that choices for a ForeignKey ChoiceField are determined + # at runtime, based on the data in the database when the form is displayed, not + # the data in the database when the form is instantiated. + self.create_basic_data() + f = ArticleForm(auto_id=False) + self.assertHTMLEqual( + f.as_ul(), + '
  • Headline: ' + "
  • " + '
  • Slug:
  • ' + '
  • Pub date:
  • ' + '
  • Writer:
  • " + '
  • Article:
  • " + '
  • Categories:
  • " + '
  • Status:
  • " + % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk), + ) + + c4 = Category.objects.create(name="Fourth", url="4th") + w_bernstein = Writer.objects.create(name="Carl Bernstein") + self.assertHTMLEqual( + f.as_ul(), + '
  • Headline: ' + "
  • " + '
  • Slug:
  • ' + '
  • Pub date:
  • ' + '
  • Writer:
  • " + '
  • Article:
  • " + '
  • Categories:
  • " + '
  • Status:
  • " + % ( + self.w_woodward.pk, + w_bernstein.pk, + self.w_royko.pk, + self.c1.pk, + self.c2.pk, + self.c3.pk, + c4.pk, + ), + ) + + def test_recleaning_model_form_instance(self): + """ + Re-cleaning an instance that was added via a ModelForm shouldn't raise + a pk uniqueness error. + """ + + class AuthorForm(forms.ModelForm): + class Meta: + model = Author + fields = "__all__" + + form = AuthorForm({"full_name": "Bob"}) + self.assertTrue(form.is_valid()) + obj = form.save() + obj.name = "Alice" + obj.full_clean() + + def test_validate_foreign_key_uses_default_manager(self): + class MyForm(forms.ModelForm): + class Meta: + model = Article + fields = "__all__" + + # Archived writers are filtered out by the default manager. + w = Writer.objects.create(name="Randy", archived=True) + data = { + "headline": "My Article", + "slug": "my-article", + "pub_date": datetime.date.today(), + "writer": w.pk, + "article": "lorem ipsum", + } + form = MyForm(data) + self.assertIs(form.is_valid(), False) + self.assertEqual( + form.errors, + { + "writer": [ + "Select a valid choice. That choice is not one of the available " + "choices." + ] + }, + ) + + def test_validate_foreign_key_to_model_with_overridden_manager(self): + class MyForm(forms.ModelForm): + class Meta: + model = Article + fields = "__all__" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # Allow archived authors. 
+ self.fields["writer"].queryset = Writer._base_manager.all() + + w = Writer.objects.create(name="Randy", archived=True) + data = { + "headline": "My Article", + "slug": "my-article", + "pub_date": datetime.date.today(), + "writer": w.pk, + "article": "lorem ipsum", + } + form = MyForm(data) + self.assertIs(form.is_valid(), True) + article = form.save() + self.assertEqual(article.writer, w) + + +class ModelMultipleChoiceFieldTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.c1 = Category.objects.create( + name="Entertainment", slug="entertainment", url="entertainment" + ) + cls.c2 = Category.objects.create( + name="It's a test", slug="its-test", url="test" + ) + cls.c3 = Category.objects.create(name="Third", slug="third-test", url="third") + + def test_model_multiple_choice_field(self): + f = forms.ModelMultipleChoiceField(Category.objects.all()) + self.assertCountEqual( + list(f.choices), + [ + (self.c1.pk, "Entertainment"), + (self.c2.pk, "It's a test"), + (self.c3.pk, "Third"), + ], + ) + with self.assertRaises(ValidationError): + f.clean(None) + with self.assertRaises(ValidationError): + f.clean([]) + self.assertCountEqual(f.clean([self.c1.id]), [self.c1]) + self.assertCountEqual(f.clean([self.c2.id]), [self.c2]) + self.assertCountEqual(f.clean([str(self.c1.id)]), [self.c1]) + self.assertCountEqual( + f.clean([str(self.c1.id), str(self.c2.id)]), + [self.c1, self.c2], + ) + self.assertCountEqual( + f.clean([self.c1.id, str(self.c2.id)]), + [self.c1, self.c2], + ) + self.assertCountEqual( + f.clean((self.c1.id, str(self.c2.id))), + [self.c1, self.c2], + ) + with self.assertRaises(ValidationError): + f.clean(["0"]) + with self.assertRaises(ValidationError): + f.clean("hello") + with self.assertRaises(ValidationError): + f.clean(["fail"]) + + # Invalid types that require TypeError to be caught (#22808). 
+ with self.assertRaises(ValidationError): + f.clean([["fail"]]) + with self.assertRaises(ValidationError): + f.clean([{"foo": "bar"}]) + + # Add a Category object *after* the ModelMultipleChoiceField has already been + # instantiated. This proves clean() checks the database during clean() rather + # than caching it at time of instantiation. + # Note, we are using an id of 1006 here since tests that run before + # this may create categories with primary keys up to 6. Use + # a number that will not conflict. + c6 = Category.objects.create(id=1006, name="Sixth", url="6th") + self.assertCountEqual(f.clean([c6.id]), [c6]) + + # Delete a Category object *after* the ModelMultipleChoiceField has already been + # instantiated. This proves clean() checks the database during clean() rather + # than caching it at time of instantiation. + Category.objects.get(url="6th").delete() + with self.assertRaises(ValidationError): + f.clean([c6.id]) + + def test_model_multiple_choice_required_false(self): + f = forms.ModelMultipleChoiceField(Category.objects.all(), required=False) + self.assertIsInstance(f.clean([]), EmptyQuerySet) + self.assertIsInstance(f.clean(()), EmptyQuerySet) + with self.assertRaises(ValidationError): + f.clean(["0"]) + with self.assertRaises(ValidationError): + f.clean([str(self.c3.id), "0"]) + with self.assertRaises(ValidationError): + f.clean([str(self.c1.id), "0"]) + + # queryset can be changed after the field is created. 
+ f.queryset = Category.objects.exclude(name="Third") + self.assertCountEqual( + list(f.choices), + [(self.c1.pk, "Entertainment"), (self.c2.pk, "It's a test")], + ) + self.assertSequenceEqual(f.clean([self.c2.id]), [self.c2]) + with self.assertRaises(ValidationError): + f.clean([self.c3.id]) + with self.assertRaises(ValidationError): + f.clean([str(self.c2.id), str(self.c3.id)]) + + f.queryset = Category.objects.all() + f.label_from_instance = lambda obj: "multicategory " + str(obj) + self.assertCountEqual( + list(f.choices), + [ + (self.c1.pk, "multicategory Entertainment"), + (self.c2.pk, "multicategory It's a test"), + (self.c3.pk, "multicategory Third"), + ], + ) + + def test_model_multiple_choice_number_of_queries(self): + """ + ModelMultipleChoiceField does O(1) queries instead of O(n) (#10156). + """ + persons = [Writer.objects.create(name="Person %s" % i) for i in range(30)] + + f = forms.ModelMultipleChoiceField(queryset=Writer.objects.all()) + self.assertNumQueries(1, f.clean, [p.pk for p in persons[1:11:2]]) + + def test_model_multiple_choice_run_validators(self): + """ + ModelMultipleChoiceField run given validators (#14144). + """ + for i in range(30): + Writer.objects.create(name="Person %s" % i) + + self._validator_run = False + + def my_validator(value): + self._validator_run = True + + f = forms.ModelMultipleChoiceField( + queryset=Writer.objects.all(), validators=[my_validator] + ) + f.clean([p.pk for p in Writer.objects.all()[8:9]]) + self.assertTrue(self._validator_run) + + def test_model_multiple_choice_show_hidden_initial(self): + """ + Test support of show_hidden_initial by ModelMultipleChoiceField. 
+ """ + + class WriterForm(forms.Form): + persons = forms.ModelMultipleChoiceField( + show_hidden_initial=True, queryset=Writer.objects.all() + ) + + person1 = Writer.objects.create(name="Person 1") + person2 = Writer.objects.create(name="Person 2") + + form = WriterForm( + initial={"persons": [person1, person2]}, + data={ + "initial-persons": [str(person1.pk), str(person2.pk)], + "persons": [str(person1.pk), str(person2.pk)], + }, + ) + self.assertTrue(form.is_valid()) + self.assertFalse(form.has_changed()) + + form = WriterForm( + initial={"persons": [person1, person2]}, + data={ + "initial-persons": [str(person1.pk), str(person2.pk)], + "persons": [str(person2.pk)], + }, + ) + self.assertTrue(form.is_valid()) + self.assertTrue(form.has_changed()) + + def test_model_multiple_choice_field_22745(self): + """ + #22745 -- Make sure that ModelMultipleChoiceField with + CheckboxSelectMultiple widget doesn't produce unnecessary db queries + when accessing its BoundField's attrs. + """ + + class ModelMultipleChoiceForm(forms.Form): + categories = forms.ModelMultipleChoiceField( + Category.objects.all(), widget=forms.CheckboxSelectMultiple + ) + + form = ModelMultipleChoiceForm() + field = form["categories"] # BoundField + template = Template("{{ field.name }}{{ field }}{{ field.help_text }}") + with self.assertNumQueries(1): + template.render(Context({"field": field})) + + def test_show_hidden_initial_changed_queries_efficiently(self): + class WriterForm(forms.Form): + persons = forms.ModelMultipleChoiceField( + show_hidden_initial=True, queryset=Writer.objects.all() + ) + + writers = (Writer.objects.create(name=str(x)) for x in range(0, 50)) + writer_pks = tuple(x.pk for x in writers) + form = WriterForm(data={"initial-persons": writer_pks}) + with self.assertNumQueries(1): + self.assertTrue(form.has_changed()) + + def test_clean_does_deduplicate_values(self): + class PersonForm(forms.Form): + persons = forms.ModelMultipleChoiceField(queryset=Person.objects.all()) + + 
person1 = Person.objects.create(name="Person 1") + form = PersonForm(data={}) + queryset = form.fields["persons"].clean([str(person1.pk)] * 50) + sql, params = queryset.query.sql_with_params() + self.assertEqual(len(params), 1) + + def test_to_field_name_with_initial_data(self): + class ArticleCategoriesForm(forms.ModelForm): + categories = forms.ModelMultipleChoiceField( + Category.objects.all(), to_field_name="slug" + ) + + class Meta: + model = Article + fields = ["categories"] + + article = Article.objects.create( + headline="Test article", + slug="test-article", + pub_date=datetime.date(1988, 1, 4), + writer=Writer.objects.create(name="Test writer"), + article="Hello.", + ) + article.categories.add(self.c2, self.c3) + form = ArticleCategoriesForm(instance=article) + self.assertCountEqual(form["categories"].value(), [self.c2.slug, self.c3.slug]) + + +class ModelOneToOneFieldTests(TestCase): + def test_modelform_onetoonefield(self): + class ImprovedArticleForm(forms.ModelForm): + class Meta: + model = ImprovedArticle + fields = "__all__" + + class ImprovedArticleWithParentLinkForm(forms.ModelForm): + class Meta: + model = ImprovedArticleWithParentLink + fields = "__all__" + + self.assertEqual(list(ImprovedArticleForm.base_fields), ["article"]) + self.assertEqual(list(ImprovedArticleWithParentLinkForm.base_fields), []) + + def test_modelform_subclassed_model(self): + class BetterWriterForm(forms.ModelForm): + class Meta: + # BetterWriter model is a subclass of Writer with an additional + # `score` field. 
+ model = BetterWriter + fields = "__all__" + + bw = BetterWriter.objects.create(name="Joe Better", score=10) + self.assertEqual( + sorted(model_to_dict(bw)), ["id", "name", "score", "writer_ptr"] + ) + self.assertEqual(sorted(model_to_dict(bw, fields=[])), []) + self.assertEqual( + sorted(model_to_dict(bw, fields=["id", "name"])), ["id", "name"] + ) + self.assertEqual( + sorted(model_to_dict(bw, exclude=[])), ["id", "name", "score", "writer_ptr"] + ) + self.assertEqual( + sorted(model_to_dict(bw, exclude=["id", "name"])), ["score", "writer_ptr"] + ) + + form = BetterWriterForm({"name": "Some Name", "score": 12}) + self.assertTrue(form.is_valid()) + bw2 = form.save() + self.assertEqual(bw2.score, 12) + + def test_onetoonefield(self): + class WriterProfileForm(forms.ModelForm): + class Meta: + # WriterProfile has a OneToOneField to Writer + model = WriterProfile + fields = "__all__" + + self.w_royko = Writer.objects.create(name="Mike Royko") + self.w_woodward = Writer.objects.create(name="Bob Woodward") + + form = WriterProfileForm() + self.assertHTMLEqual( + form.as_p(), + """ +

    +

    +

    +

    + """ + % ( + self.w_woodward.pk, + self.w_royko.pk, + ), + ) + + data = { + "writer": str(self.w_woodward.pk), + "age": "65", + } + form = WriterProfileForm(data) + instance = form.save() + self.assertEqual(str(instance), "Bob Woodward is 65") + + form = WriterProfileForm(instance=instance) + self.assertHTMLEqual( + form.as_p(), + """ +

    +

    +

    + +

    """ + % ( + self.w_woodward.pk, + self.w_royko.pk, + ), + ) + + def test_assignment_of_none(self): + class AuthorForm(forms.ModelForm): + class Meta: + model = Author + fields = ["publication", "full_name"] + + publication = Publication.objects.create( + title="Pravda", date_published=datetime.date(1991, 8, 22) + ) + author = Author.objects.create(publication=publication, full_name="John Doe") + form = AuthorForm({"publication": "", "full_name": "John Doe"}, instance=author) + self.assertTrue(form.is_valid()) + self.assertIsNone(form.cleaned_data["publication"]) + author = form.save() + # author object returned from form still retains original publication object + # that's why we need to retrieve it from database again + new_author = Author.objects.get(pk=author.pk) + self.assertIsNone(new_author.publication) + + def test_assignment_of_none_null_false(self): + class AuthorForm(forms.ModelForm): + class Meta: + model = Author1 + fields = ["publication", "full_name"] + + publication = Publication.objects.create( + title="Pravda", date_published=datetime.date(1991, 8, 22) + ) + author = Author1.objects.create(publication=publication, full_name="John Doe") + form = AuthorForm({"publication": "", "full_name": "John Doe"}, instance=author) + self.assertFalse(form.is_valid()) + + +class FileAndImageFieldTests(TestCase): + def test_clean_false(self): + """ + If the ``clean`` method on a non-required FileField receives False as + the data (meaning clear the field value), it returns False, regardless + of the value of ``initial``. + """ + f = forms.FileField(required=False) + self.assertIs(f.clean(False), False) + self.assertIs(f.clean(False, "initial"), False) + + def test_clean_false_required(self): + """ + If the ``clean`` method on a required FileField receives False as the + data, it has the same effect as None: initial is returned if non-empty, + otherwise the validation catches the lack of a required value. 
+ """ + f = forms.FileField(required=True) + self.assertEqual(f.clean(False, "initial"), "initial") + with self.assertRaises(ValidationError): + f.clean(False) + + def test_full_clear(self): + """ + Integration happy-path test that a model FileField can actually be set + and cleared via a ModelForm. + """ + + class DocumentForm(forms.ModelForm): + class Meta: + model = Document + fields = "__all__" + + form = DocumentForm() + self.assertIn('name="myfile"', str(form)) + self.assertNotIn("myfile-clear", str(form)) + form = DocumentForm( + files={"myfile": SimpleUploadedFile("something.txt", b"content")} + ) + self.assertTrue(form.is_valid()) + doc = form.save(commit=False) + self.assertEqual(doc.myfile.name, "something.txt") + form = DocumentForm(instance=doc) + self.assertIn("myfile-clear", str(form)) + form = DocumentForm(instance=doc, data={"myfile-clear": "true"}) + doc = form.save(commit=False) + self.assertFalse(doc.myfile) + + def test_clear_and_file_contradiction(self): + """ + If the user submits a new file upload AND checks the clear checkbox, + they get a validation error, and the bound redisplay of the form still + includes the current file and the clear checkbox. 
+ """ + + class DocumentForm(forms.ModelForm): + class Meta: + model = Document + fields = "__all__" + + form = DocumentForm( + files={"myfile": SimpleUploadedFile("something.txt", b"content")} + ) + self.assertTrue(form.is_valid()) + doc = form.save(commit=False) + form = DocumentForm( + instance=doc, + files={"myfile": SimpleUploadedFile("something.txt", b"content")}, + data={"myfile-clear": "true"}, + ) + self.assertTrue(not form.is_valid()) + self.assertEqual( + form.errors["myfile"], + ["Please either submit a file or check the clear checkbox, not both."], + ) + rendered = str(form) + self.assertIn("something.txt", rendered) + self.assertIn("myfile-clear", rendered) + + def test_render_empty_file_field(self): + class DocumentForm(forms.ModelForm): + class Meta: + model = Document + fields = "__all__" + + doc = Document.objects.create() + form = DocumentForm(instance=doc) + self.assertHTMLEqual( + str(form["myfile"]), '' + ) + + def test_file_field_data(self): + # Test conditions when files is either not given or empty. + f = TextFileForm(data={"description": "Assistance"}) + self.assertFalse(f.is_valid()) + f = TextFileForm(data={"description": "Assistance"}, files={}) + self.assertFalse(f.is_valid()) + + # Upload a file and ensure it all works as expected. 
+ f = TextFileForm( + data={"description": "Assistance"}, + files={"file": SimpleUploadedFile("test1.txt", b"hello world")}, + ) + self.assertTrue(f.is_valid()) + self.assertEqual(type(f.cleaned_data["file"]), SimpleUploadedFile) + instance = f.save() + self.assertEqual(instance.file.name, "tests/test1.txt") + instance.file.delete() + + # If the previous file has been deleted, the file name can be reused + f = TextFileForm( + data={"description": "Assistance"}, + files={"file": SimpleUploadedFile("test1.txt", b"hello world")}, + ) + self.assertTrue(f.is_valid()) + self.assertEqual(type(f.cleaned_data["file"]), SimpleUploadedFile) + instance = f.save() + self.assertEqual(instance.file.name, "tests/test1.txt") + + # Check if the max_length attribute has been inherited from the model. + f = TextFileForm( + data={"description": "Assistance"}, + files={"file": SimpleUploadedFile("test-maxlength.txt", b"hello world")}, + ) + self.assertFalse(f.is_valid()) + + # Edit an instance that already has the file defined in the model. This will not + # save the file again, but leave it exactly as it is. + f = TextFileForm({"description": "Assistance"}, instance=instance) + self.assertTrue(f.is_valid()) + self.assertEqual(f.cleaned_data["file"].name, "tests/test1.txt") + instance = f.save() + self.assertEqual(instance.file.name, "tests/test1.txt") + + # Delete the current file since this is not done by Django. + instance.file.delete() + + # Override the file by uploading a new one. + f = TextFileForm( + data={"description": "Assistance"}, + files={"file": SimpleUploadedFile("test2.txt", b"hello world")}, + instance=instance, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.file.name, "tests/test2.txt") + + # Delete the current file since this is not done by Django. 
+ instance.file.delete() + instance.delete() + + def test_filefield_required_false(self): + # Test the non-required FileField + f = TextFileForm(data={"description": "Assistance"}) + f.fields["file"].required = False + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.file.name, "") + + f = TextFileForm( + data={"description": "Assistance"}, + files={"file": SimpleUploadedFile("test3.txt", b"hello world")}, + instance=instance, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.file.name, "tests/test3.txt") + + # Instance can be edited w/out re-uploading the file and existing file + # should be preserved. + f = TextFileForm({"description": "New Description"}, instance=instance) + f.fields["file"].required = False + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.description, "New Description") + self.assertEqual(instance.file.name, "tests/test3.txt") + + # Delete the current file since this is not done by Django. + instance.file.delete() + instance.delete() + + def test_custom_file_field_save(self): + """ + Regression for #11149: save_form_data should be called only once + """ + + class CFFForm(forms.ModelForm): + class Meta: + model = CustomFF + fields = "__all__" + + # It's enough that the form saves without error -- the custom save routine will + # generate an AssertionError if it is called more than once during save. + form = CFFForm(data={"f": None}) + form.save() + + def test_file_field_multiple_save(self): + """ + Simulate a file upload and check how many times Model.save() gets + called. Test for bug #639. + """ + + class PhotoForm(forms.ModelForm): + class Meta: + model = Photo + fields = "__all__" + + # Grab an image for testing. + filename = os.path.join(os.path.dirname(__file__), "test.png") + with open(filename, "rb") as fp: + img = fp.read() + + # Fake a POST QueryDict and FILES MultiValueDict. 
+ data = {"title": "Testing"} + files = {"image": SimpleUploadedFile("test.png", img, "image/png")} + + form = PhotoForm(data=data, files=files) + p = form.save() + + try: + # Check the savecount stored on the object (see the model). + self.assertEqual(p._savecount, 1) + finally: + # Delete the "uploaded" file to avoid clogging /tmp. + p = Photo.objects.get() + p.image.delete(save=False) + + def test_file_path_field_blank(self): + """FilePathField(blank=True) includes the empty option.""" + + class FPForm(forms.ModelForm): + class Meta: + model = FilePathModel + fields = "__all__" + + form = FPForm() + self.assertEqual( + [name for _, name in form["path"].field.choices], ["---------", "models.py"] + ) + + @skipUnless(test_images, "Pillow not installed") + def test_image_field(self): + # ImageField and FileField are nearly identical, but they differ slightly when + # it comes to validation. This specifically tests that #6302 is fixed for + # both file fields and image fields. + + with open(os.path.join(os.path.dirname(__file__), "test.png"), "rb") as fp: + image_data = fp.read() + with open(os.path.join(os.path.dirname(__file__), "test2.png"), "rb") as fp: + image_data2 = fp.read() + + f = ImageFileForm( + data={"description": "An image"}, + files={"image": SimpleUploadedFile("test.png", image_data)}, + ) + self.assertTrue(f.is_valid()) + self.assertEqual(type(f.cleaned_data["image"]), SimpleUploadedFile) + instance = f.save() + self.assertEqual(instance.image.name, "tests/test.png") + self.assertEqual(instance.width, 16) + self.assertEqual(instance.height, 16) + + # Delete the current file since this is not done by Django, but don't save + # because the dimension fields are not null=True. 
+ instance.image.delete(save=False) + f = ImageFileForm( + data={"description": "An image"}, + files={"image": SimpleUploadedFile("test.png", image_data)}, + ) + self.assertTrue(f.is_valid()) + self.assertEqual(type(f.cleaned_data["image"]), SimpleUploadedFile) + instance = f.save() + self.assertEqual(instance.image.name, "tests/test.png") + self.assertEqual(instance.width, 16) + self.assertEqual(instance.height, 16) + + # Edit an instance that already has the (required) image defined in the + # model. This will not save the image again, but leave it exactly as it + # is. + + f = ImageFileForm(data={"description": "Look, it changed"}, instance=instance) + self.assertTrue(f.is_valid()) + self.assertEqual(f.cleaned_data["image"].name, "tests/test.png") + instance = f.save() + self.assertEqual(instance.image.name, "tests/test.png") + self.assertEqual(instance.height, 16) + self.assertEqual(instance.width, 16) + + # Delete the current file since this is not done by Django, but don't save + # because the dimension fields are not null=True. + instance.image.delete(save=False) + # Override the file by uploading a new one. + + f = ImageFileForm( + data={"description": "Changed it"}, + files={"image": SimpleUploadedFile("test2.png", image_data2)}, + instance=instance, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.image.name, "tests/test2.png") + self.assertEqual(instance.height, 32) + self.assertEqual(instance.width, 48) + + # Delete the current file since this is not done by Django, but don't save + # because the dimension fields are not null=True. 
+ instance.image.delete(save=False) + instance.delete() + + f = ImageFileForm( + data={"description": "Changed it"}, + files={"image": SimpleUploadedFile("test2.png", image_data2)}, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.image.name, "tests/test2.png") + self.assertEqual(instance.height, 32) + self.assertEqual(instance.width, 48) + + # Delete the current file since this is not done by Django, but don't save + # because the dimension fields are not null=True. + instance.image.delete(save=False) + instance.delete() + + # Test the non-required ImageField + # Note: In Oracle, we expect a null ImageField to return '' instead of + # None. + if connection.features.interprets_empty_strings_as_nulls: + expected_null_imagefield_repr = "" + else: + expected_null_imagefield_repr = None + + f = OptionalImageFileForm(data={"description": "Test"}) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.image.name, expected_null_imagefield_repr) + self.assertIsNone(instance.width) + self.assertIsNone(instance.height) + + f = OptionalImageFileForm( + data={"description": "And a final one"}, + files={"image": SimpleUploadedFile("test3.png", image_data)}, + instance=instance, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.image.name, "tests/test3.png") + self.assertEqual(instance.width, 16) + self.assertEqual(instance.height, 16) + + # Editing the instance without re-uploading the image should not affect + # the image or its width/height properties. + f = OptionalImageFileForm({"description": "New Description"}, instance=instance) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.description, "New Description") + self.assertEqual(instance.image.name, "tests/test3.png") + self.assertEqual(instance.width, 16) + self.assertEqual(instance.height, 16) + + # Delete the current file since this is not done by Django. 
+ instance.image.delete() + instance.delete() + + f = OptionalImageFileForm( + data={"description": "And a final one"}, + files={"image": SimpleUploadedFile("test4.png", image_data2)}, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.image.name, "tests/test4.png") + self.assertEqual(instance.width, 48) + self.assertEqual(instance.height, 32) + instance.delete() + # Callable upload_to behavior that's dependent on the value of another + # field in the model. + f = ImageFileForm( + data={"description": "And a final one", "path": "foo"}, + files={"image": SimpleUploadedFile("test4.png", image_data)}, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.image.name, "foo/test4.png") + instance.delete() + + # Editing an instance that has an image without an extension shouldn't + # fail validation. First create: + f = NoExtensionImageFileForm( + data={"description": "An image"}, + files={"image": SimpleUploadedFile("test.png", image_data)}, + ) + self.assertTrue(f.is_valid()) + instance = f.save() + self.assertEqual(instance.image.name, "tests/no_extension") + # Then edit: + f = NoExtensionImageFileForm( + data={"description": "Edited image"}, instance=instance + ) + self.assertTrue(f.is_valid()) + + +class ModelOtherFieldTests(SimpleTestCase): + def test_big_integer_field(self): + bif = BigIntForm({"biggie": "-9223372036854775808"}) + self.assertTrue(bif.is_valid()) + bif = BigIntForm({"biggie": "-9223372036854775809"}) + self.assertFalse(bif.is_valid()) + self.assertEqual( + bif.errors, + { + "biggie": [ + "Ensure this value is greater than or equal to " + "-9223372036854775808." + ] + }, + ) + bif = BigIntForm({"biggie": "9223372036854775807"}) + self.assertTrue(bif.is_valid()) + bif = BigIntForm({"biggie": "9223372036854775808"}) + self.assertFalse(bif.is_valid()) + self.assertEqual( + bif.errors, + { + "biggie": [ + "Ensure this value is less than or equal to 9223372036854775807." 
+ ] + }, + ) + + @ignore_warnings(category=RemovedInDjango60Warning) + def test_url_on_modelform(self): + "Check basic URL field validation on model forms" + + class HomepageForm(forms.ModelForm): + class Meta: + model = Homepage + fields = "__all__" + + self.assertFalse(HomepageForm({"url": "foo"}).is_valid()) + self.assertFalse(HomepageForm({"url": "http://"}).is_valid()) + self.assertFalse(HomepageForm({"url": "http://example"}).is_valid()) + self.assertFalse(HomepageForm({"url": "http://example."}).is_valid()) + self.assertFalse(HomepageForm({"url": "http://com."}).is_valid()) + + self.assertTrue(HomepageForm({"url": "http://localhost"}).is_valid()) + self.assertTrue(HomepageForm({"url": "http://example.com"}).is_valid()) + self.assertTrue(HomepageForm({"url": "http://www.example.com"}).is_valid()) + self.assertTrue(HomepageForm({"url": "http://www.example.com:8000"}).is_valid()) + self.assertTrue(HomepageForm({"url": "http://www.example.com/test"}).is_valid()) + self.assertTrue( + HomepageForm({"url": "http://www.example.com:8000/test"}).is_valid() + ) + self.assertTrue(HomepageForm({"url": "http://example.com/foo/bar"}).is_valid()) + + def test_url_modelform_assume_scheme_warning(self): + msg = ( + "The default scheme will be changed from 'http' to 'https' in Django " + "6.0. Pass the forms.URLField.assume_scheme argument to silence this " + "warning." + ) + with self.assertWarnsMessage(RemovedInDjango60Warning, msg): + + class HomepageForm(forms.ModelForm): + class Meta: + model = Homepage + fields = "__all__" + + def test_modelform_non_editable_field(self): + """ + When explicitly including a non-editable field in a ModelForm, the + error message should be explicit. 
+ """ + # 'created', non-editable, is excluded by default + self.assertNotIn("created", ArticleForm().fields) + + msg = ( + "'created' cannot be specified for Article model form as it is a " + "non-editable field" + ) + with self.assertRaisesMessage(FieldError, msg): + + class InvalidArticleForm(forms.ModelForm): + class Meta: + model = Article + fields = ("headline", "created") + + def test_https_prefixing(self): + """ + If the https:// prefix is omitted on form input, the field adds it + again. + """ + + class HomepageForm(forms.ModelForm): + # RemovedInDjango60Warning. + url = forms.URLField(assume_scheme="https") + + class Meta: + model = Homepage + fields = "__all__" + + form = HomepageForm({"url": "example.com"}) + self.assertTrue(form.is_valid()) + self.assertEqual(form.cleaned_data["url"], "https://example.com") + + form = HomepageForm({"url": "example.com/test"}) + self.assertTrue(form.is_valid()) + self.assertEqual(form.cleaned_data["url"], "https://example.com/test") + + +class OtherModelFormTests(TestCase): + def test_media_on_modelform(self): + # Similar to a regular Form class you can define custom media to be used on + # the ModelForm. + f = ModelFormWithMedia() + self.assertHTMLEqual( + str(f.media), + '' + '', + ) + + def test_choices_type(self): + # Choices on CharField and IntegerField + f = ArticleForm() + with self.assertRaises(ValidationError): + f.fields["status"].clean("42") + + f = ArticleStatusForm() + with self.assertRaises(ValidationError): + f.fields["status"].clean("z") + + def test_prefetch_related_queryset(self): + """ + ModelChoiceField should respect a prefetch_related() on its queryset. 
+ """ + blue = Colour.objects.create(name="blue") + red = Colour.objects.create(name="red") + multicolor_item = ColourfulItem.objects.create() + multicolor_item.colours.add(blue, red) + red_item = ColourfulItem.objects.create() + red_item.colours.add(red) + + class ColorModelChoiceField(forms.ModelChoiceField): + def label_from_instance(self, obj): + return ", ".join(c.name for c in obj.colours.all()) + + field = ColorModelChoiceField(ColourfulItem.objects.prefetch_related("colours")) + with self.assertNumQueries(3): # would be 4 if prefetch is ignored + self.assertEqual( + tuple(field.choices), + ( + ("", "---------"), + (multicolor_item.pk, "blue, red"), + (red_item.pk, "red"), + ), + ) + + def test_foreignkeys_which_use_to_field(self): + apple = Inventory.objects.create(barcode=86, name="Apple") + pear = Inventory.objects.create(barcode=22, name="Pear") + core = Inventory.objects.create(barcode=87, name="Core", parent=apple) + + field = forms.ModelChoiceField(Inventory.objects.all(), to_field_name="barcode") + self.assertEqual( + tuple(field.choices), + (("", "---------"), (86, "Apple"), (87, "Core"), (22, "Pear")), + ) + + form = InventoryForm(instance=core) + self.assertHTMLEqual( + str(form["parent"]), + """""", + ) + data = model_to_dict(core) + data["parent"] = "22" + form = InventoryForm(data=data, instance=core) + core = form.save() + self.assertEqual(core.parent.name, "Pear") + + class CategoryForm(forms.ModelForm): + description = forms.CharField() + + class Meta: + model = Category + fields = ["description", "url"] + + self.assertEqual(list(CategoryForm.base_fields), ["description", "url"]) + + self.assertHTMLEqual( + str(CategoryForm()), + '
    ' + '
    ', + ) + # to_field_name should also work on ModelMultipleChoiceField ################## + + field = forms.ModelMultipleChoiceField( + Inventory.objects.all(), to_field_name="barcode" + ) + self.assertEqual( + tuple(field.choices), ((86, "Apple"), (87, "Core"), (22, "Pear")) + ) + self.assertSequenceEqual(field.clean([86]), [apple]) + + form = SelectInventoryForm({"items": [87, 22]}) + self.assertTrue(form.is_valid()) + self.assertEqual(len(form.cleaned_data), 1) + self.assertSequenceEqual(form.cleaned_data["items"], [core, pear]) + + def test_model_field_that_returns_none_to_exclude_itself_with_explicit_fields(self): + self.assertEqual(list(CustomFieldForExclusionForm.base_fields), ["name"]) + self.assertHTMLEqual( + str(CustomFieldForExclusionForm()), + '
    ', + ) + + def test_iterable_model_m2m(self): + class ColourfulItemForm(forms.ModelForm): + class Meta: + model = ColourfulItem + fields = "__all__" + + colour = Colour.objects.create(name="Blue") + form = ColourfulItemForm() + self.maxDiff = 1024 + self.assertHTMLEqual( + form.as_p(), + """ +

    + +

    +

    +

    + """ + % {"blue_pk": colour.pk}, + ) + + def test_callable_field_default(self): + class PublicationDefaultsForm(forms.ModelForm): + class Meta: + model = PublicationDefaults + fields = ("title", "date_published", "mode", "category") + + self.maxDiff = 2000 + form = PublicationDefaultsForm() + today_str = str(datetime.date.today()) + self.assertHTMLEqual( + form.as_p(), + """ +

    + +

    +

    + + +

    +

    + +

    +

    + + + + """.format( + today_str + ), + ) + empty_data = { + "title": "", + "date_published": today_str, + "initial-date_published": today_str, + "mode": "di", + "initial-mode": "di", + "category": "3", + "initial-category": "3", + } + bound_form = PublicationDefaultsForm(empty_data) + self.assertFalse(bound_form.has_changed()) + + +class ModelFormCustomErrorTests(SimpleTestCase): + def test_custom_error_messages(self): + data = {"name1": "@#$!!**@#$", "name2": "@#$!!**@#$"} + errors = CustomErrorMessageForm(data).errors + self.assertHTMLEqual( + str(errors["name1"]), + '

    • Form custom error message.
    ', + ) + self.assertHTMLEqual( + str(errors["name2"]), + '
    • Model custom error message.
    ', + ) + + def test_model_clean_error_messages(self): + data = {"name1": "FORBIDDEN_VALUE", "name2": "ABC"} + form = CustomErrorMessageForm(data) + self.assertFalse(form.is_valid()) + self.assertHTMLEqual( + str(form.errors["name1"]), + '
    • Model.clean() error messages.
    ', + ) + data = {"name1": "FORBIDDEN_VALUE2", "name2": "ABC"} + form = CustomErrorMessageForm(data) + self.assertFalse(form.is_valid()) + self.assertHTMLEqual( + str(form.errors["name1"]), + '
      ' + "
    • Model.clean() error messages (simpler syntax).
    ", + ) + data = {"name1": "GLOBAL_ERROR", "name2": "ABC"} + form = CustomErrorMessageForm(data) + self.assertFalse(form.is_valid()) + self.assertEqual(form.errors["__all__"], ["Global error message."]) + + +class CustomCleanTests(TestCase): + def test_override_clean(self): + """ + Regression for #12596: Calling super from ModelForm.clean() should be + optional. + """ + + class TripleFormWithCleanOverride(forms.ModelForm): + class Meta: + model = Triple + fields = "__all__" + + def clean(self): + if not self.cleaned_data["left"] == self.cleaned_data["right"]: + raise ValidationError("Left and right should be equal") + return self.cleaned_data + + form = TripleFormWithCleanOverride({"left": 1, "middle": 2, "right": 1}) + self.assertTrue(form.is_valid()) + # form.instance.left will be None if the instance was not constructed + # by form.full_clean(). + self.assertEqual(form.instance.left, 1) + + def test_model_form_clean_applies_to_model(self): + """ + Regression test for #12960. Make sure the cleaned_data returned from + ModelForm.clean() is applied to the model instance. 
+ """ + + class CategoryForm(forms.ModelForm): + class Meta: + model = Category + fields = "__all__" + + def clean(self): + self.cleaned_data["name"] = self.cleaned_data["name"].upper() + return self.cleaned_data + + data = {"name": "Test", "slug": "test", "url": "/test"} + form = CategoryForm(data) + category = form.save() + self.assertEqual(category.name, "TEST") + + +class ModelFormInheritanceTests(SimpleTestCase): + def test_form_subclass_inheritance(self): + class Form(forms.Form): + age = forms.IntegerField() + + class ModelForm(forms.ModelForm, Form): + class Meta: + model = Writer + fields = "__all__" + + self.assertEqual(list(ModelForm().fields), ["name", "age"]) + + def test_field_removal(self): + class ModelForm(forms.ModelForm): + class Meta: + model = Writer + fields = "__all__" + + class Mixin: + age = None + + class Form(forms.Form): + age = forms.IntegerField() + + class Form2(forms.Form): + foo = forms.IntegerField() + + self.assertEqual(list(ModelForm().fields), ["name"]) + self.assertEqual(list(type("NewForm", (Mixin, Form), {})().fields), []) + self.assertEqual( + list(type("NewForm", (Form2, Mixin, Form), {})().fields), ["foo"] + ) + self.assertEqual( + list(type("NewForm", (Mixin, ModelForm, Form), {})().fields), ["name"] + ) + self.assertEqual( + list(type("NewForm", (ModelForm, Mixin, Form), {})().fields), ["name"] + ) + self.assertEqual( + list(type("NewForm", (ModelForm, Form, Mixin), {})().fields), + ["name", "age"], + ) + self.assertEqual( + list(type("NewForm", (ModelForm, Form), {"age": None})().fields), ["name"] + ) + + def test_field_removal_name_clashes(self): + """ + Form fields can be removed in subclasses by setting them to None + (#22510). 
+ """ + + class MyForm(forms.ModelForm): + media = forms.CharField() + + class Meta: + model = Writer + fields = "__all__" + + class SubForm(MyForm): + media = None + + self.assertIn("media", MyForm().fields) + self.assertNotIn("media", SubForm().fields) + self.assertTrue(hasattr(MyForm, "media")) + self.assertTrue(hasattr(SubForm, "media")) + + +class StumpJokeForm(forms.ModelForm): + class Meta: + model = StumpJoke + fields = "__all__" + + +class CustomFieldWithQuerysetButNoLimitChoicesTo(forms.Field): + queryset = 42 + + +class StumpJokeWithCustomFieldForm(forms.ModelForm): + custom = CustomFieldWithQuerysetButNoLimitChoicesTo() + + class Meta: + model = StumpJoke + fields = () + + +class LimitChoicesToTests(TestCase): + """ + Tests the functionality of ``limit_choices_to``. + """ + + @classmethod + def setUpTestData(cls): + cls.threepwood = Character.objects.create( + username="threepwood", + last_action=datetime.datetime.today() + datetime.timedelta(days=1), + ) + cls.marley = Character.objects.create( + username="marley", + last_action=datetime.datetime.today() - datetime.timedelta(days=1), + ) + + def test_limit_choices_to_callable_for_fk_rel(self): + """ + A ForeignKey can use limit_choices_to as a callable (#2554). + """ + stumpjokeform = StumpJokeForm() + self.assertSequenceEqual( + stumpjokeform.fields["most_recently_fooled"].queryset, [self.threepwood] + ) + + def test_limit_choices_to_callable_for_m2m_rel(self): + """ + A ManyToManyField can use limit_choices_to as a callable (#2554). + """ + stumpjokeform = StumpJokeForm() + self.assertSequenceEqual( + stumpjokeform.fields["most_recently_fooled"].queryset, [self.threepwood] + ) + + def test_custom_field_with_queryset_but_no_limit_choices_to(self): + """ + A custom field with a `queryset` attribute but no `limit_choices_to` + works (#23795). 
+ """ + f = StumpJokeWithCustomFieldForm() + self.assertEqual(f.fields["custom"].queryset, 42) + + def test_fields_for_model_applies_limit_choices_to(self): + fields = fields_for_model(StumpJoke, ["has_fooled_today"]) + self.assertSequenceEqual(fields["has_fooled_today"].queryset, [self.threepwood]) + + def test_callable_called_each_time_form_is_instantiated(self): + field = StumpJokeForm.base_fields["most_recently_fooled"] + with mock.patch.object(field, "limit_choices_to") as today_callable_dict: + StumpJokeForm() + self.assertEqual(today_callable_dict.call_count, 1) + StumpJokeForm() + self.assertEqual(today_callable_dict.call_count, 2) + StumpJokeForm() + self.assertEqual(today_callable_dict.call_count, 3) + + @isolate_apps("model_forms") + def test_limit_choices_to_no_duplicates(self): + joke1 = StumpJoke.objects.create( + funny=True, + most_recently_fooled=self.threepwood, + ) + joke2 = StumpJoke.objects.create( + funny=True, + most_recently_fooled=self.threepwood, + ) + joke3 = StumpJoke.objects.create( + funny=True, + most_recently_fooled=self.marley, + ) + StumpJoke.objects.create(funny=False, most_recently_fooled=self.marley) + joke1.has_fooled_today.add(self.marley, self.threepwood) + joke2.has_fooled_today.add(self.marley) + joke3.has_fooled_today.add(self.marley, self.threepwood) + + class CharacterDetails(models.Model): + character1 = models.ForeignKey( + Character, + models.CASCADE, + limit_choices_to=models.Q( + jokes__funny=True, + jokes_today__funny=True, + ), + related_name="details_fk_1", + ) + character2 = models.ForeignKey( + Character, + models.CASCADE, + limit_choices_to={ + "jokes__funny": True, + "jokes_today__funny": True, + }, + related_name="details_fk_2", + ) + character3 = models.ManyToManyField( + Character, + limit_choices_to=models.Q( + jokes__funny=True, + jokes_today__funny=True, + ), + related_name="details_m2m_1", + ) + + class CharacterDetailsForm(forms.ModelForm): + class Meta: + model = CharacterDetails + fields = "__all__" 
+ + form = CharacterDetailsForm() + self.assertCountEqual( + form.fields["character1"].queryset, + [self.marley, self.threepwood], + ) + self.assertCountEqual( + form.fields["character2"].queryset, + [self.marley, self.threepwood], + ) + self.assertCountEqual( + form.fields["character3"].queryset, + [self.marley, self.threepwood], + ) + + def test_limit_choices_to_m2m_through(self): + class DiceForm(forms.ModelForm): + class Meta: + model = Dice + fields = ["numbers"] + + Number.objects.create(value=0) + n1 = Number.objects.create(value=1) + n2 = Number.objects.create(value=2) + + form = DiceForm() + self.assertCountEqual(form.fields["numbers"].queryset, [n1, n2]) + + +class FormFieldCallbackTests(SimpleTestCase): + def test_baseform_with_widgets_in_meta(self): + """ + Using base forms with widgets defined in Meta should not raise errors. + """ + widget = forms.Textarea() + + class BaseForm(forms.ModelForm): + class Meta: + model = Person + widgets = {"name": widget} + fields = "__all__" + + Form = modelform_factory(Person, form=BaseForm) + self.assertIsInstance(Form.base_fields["name"].widget, forms.Textarea) + + def test_factory_with_widget_argument(self): + """Regression for #15315: modelform_factory should accept widgets + argument + """ + widget = forms.Textarea() + + # Without a widget should not set the widget to textarea + Form = modelform_factory(Person, fields="__all__") + self.assertNotEqual(Form.base_fields["name"].widget.__class__, forms.Textarea) + + # With a widget should not set the widget to textarea + Form = modelform_factory(Person, fields="__all__", widgets={"name": widget}) + self.assertEqual(Form.base_fields["name"].widget.__class__, forms.Textarea) + + def test_modelform_factory_without_fields(self): + """Regression for #19733""" + message = ( + "Calling modelform_factory without defining 'fields' or 'exclude' " + "explicitly is prohibited." 
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, message): + modelform_factory(Person) + + def test_modelform_factory_with_all_fields(self): + """Regression for #19733""" + form = modelform_factory(Person, fields="__all__") + self.assertEqual(list(form.base_fields), ["name"]) + + def test_custom_callback(self): + """A custom formfield_callback is used if provided""" + callback_args = [] + + def callback(db_field, **kwargs): + callback_args.append((db_field, kwargs)) + return db_field.formfield(**kwargs) + + widget = forms.Textarea() + + class BaseForm(forms.ModelForm): + class Meta: + model = Person + widgets = {"name": widget} + fields = "__all__" + + modelform_factory(Person, form=BaseForm, formfield_callback=callback) + id_field, name_field = Person._meta.fields + + self.assertEqual( + callback_args, [(id_field, {}), (name_field, {"widget": widget})] + ) + + def test_bad_callback(self): + # A bad callback provided by user still gives an error + with self.assertRaises(TypeError): + modelform_factory( + Person, + fields="__all__", + formfield_callback="not a function or callable", + ) + + def test_inherit_after_custom_callback(self): + def callback(db_field, **kwargs): + if isinstance(db_field, models.CharField): + return forms.CharField(widget=forms.Textarea) + return db_field.formfield(**kwargs) + + class BaseForm(forms.ModelForm): + class Meta: + model = Person + fields = "__all__" + + NewForm = modelform_factory(Person, form=BaseForm, formfield_callback=callback) + + class InheritedForm(NewForm): + pass + + for name in NewForm.base_fields: + self.assertEqual( + type(InheritedForm.base_fields[name].widget), + type(NewForm.base_fields[name].widget), + ) + + def test_custom_callback_in_meta(self): + def callback(db_field, **kwargs): + return forms.CharField(widget=forms.Textarea) + + class NewForm(forms.ModelForm): + class Meta: + model = Person + fields = ["id", "name"] + formfield_callback = callback + + for field in NewForm.base_fields.values(): + 
self.assertEqual(type(field.widget), forms.Textarea) + + def test_custom_callback_from_base_form_meta(self): + def callback(db_field, **kwargs): + return forms.CharField(widget=forms.Textarea) + + class BaseForm(forms.ModelForm): + class Meta: + model = Person + fields = "__all__" + formfield_callback = callback + + NewForm = modelform_factory(model=Person, form=BaseForm) + + class InheritedForm(NewForm): + pass + + for name, field in NewForm.base_fields.items(): + self.assertEqual(type(field.widget), forms.Textarea) + self.assertEqual( + type(field.widget), + type(InheritedForm.base_fields[name].widget), + ) + + +class LocalizedModelFormTest(TestCase): + def test_model_form_applies_localize_to_some_fields(self): + class PartiallyLocalizedTripleForm(forms.ModelForm): + class Meta: + model = Triple + localized_fields = ( + "left", + "right", + ) + fields = "__all__" + + f = PartiallyLocalizedTripleForm({"left": 10, "middle": 10, "right": 10}) + self.assertTrue(f.is_valid()) + self.assertTrue(f.fields["left"].localize) + self.assertFalse(f.fields["middle"].localize) + self.assertTrue(f.fields["right"].localize) + + def test_model_form_applies_localize_to_all_fields(self): + class FullyLocalizedTripleForm(forms.ModelForm): + class Meta: + model = Triple + localized_fields = "__all__" + fields = "__all__" + + f = FullyLocalizedTripleForm({"left": 10, "middle": 10, "right": 10}) + self.assertTrue(f.is_valid()) + self.assertTrue(f.fields["left"].localize) + self.assertTrue(f.fields["middle"].localize) + self.assertTrue(f.fields["right"].localize) + + def test_model_form_refuses_arbitrary_string(self): + msg = ( + "BrokenLocalizedTripleForm.Meta.localized_fields " + "cannot be a string. Did you mean to type: ('foo',)?" 
+ ) + with self.assertRaisesMessage(TypeError, msg): + + class BrokenLocalizedTripleForm(forms.ModelForm): + class Meta: + model = Triple + localized_fields = "foo" + + +class CustomMetaclass(ModelFormMetaclass): + def __new__(cls, name, bases, attrs): + new = super().__new__(cls, name, bases, attrs) + new.base_fields = {} + return new + + +class CustomMetaclassForm(forms.ModelForm, metaclass=CustomMetaclass): + pass + + +class CustomMetaclassTestCase(SimpleTestCase): + def test_modelform_factory_metaclass(self): + new_cls = modelform_factory(Person, fields="__all__", form=CustomMetaclassForm) + self.assertEqual(new_cls.base_fields, {}) + + +class StrictAssignmentTests(SimpleTestCase): + """ + Should a model do anything special with __setattr__() or descriptors which + raise a ValidationError, a model form should catch the error (#24706). + """ + + def test_setattr_raises_validation_error_field_specific(self): + """ + A model ValidationError using the dict form should put the error + message into the correct key of form.errors. + """ + form_class = modelform_factory( + model=StrictAssignmentFieldSpecific, fields=["title"] + ) + form = form_class(data={"title": "testing setattr"}, files=None) + # This line turns on the ValidationError; it avoids the model erroring + # when its own __init__() is called when creating form.instance. + form.instance._should_error = True + self.assertFalse(form.is_valid()) + self.assertEqual( + form.errors, + {"title": ["Cannot set attribute", "This field cannot be blank."]}, + ) + + def test_setattr_raises_validation_error_non_field(self): + """ + A model ValidationError not using the dict form should put the error + message into __all__ (i.e. non-field errors) on the form. 
+ """ + form_class = modelform_factory(model=StrictAssignmentAll, fields=["title"]) + form = form_class(data={"title": "testing setattr"}, files=None) + # This line turns on the ValidationError; it avoids the model erroring + # when its own __init__() is called when creating form.instance. + form.instance._should_error = True + self.assertFalse(form.is_valid()) + self.assertEqual( + form.errors, + { + "__all__": ["Cannot set attribute"], + "title": ["This field cannot be blank."], + }, + ) + + +class ModelToDictTests(TestCase): + def test_many_to_many(self): + """Data for a ManyToManyField is a list rather than a lazy QuerySet.""" + blue = Colour.objects.create(name="blue") + red = Colour.objects.create(name="red") + item = ColourfulItem.objects.create() + item.colours.set([blue]) + data = model_to_dict(item)["colours"] + self.assertEqual(data, [blue]) + item.colours.set([red]) + # If data were a QuerySet, it would be reevaluated here and give "red" + # instead of the original value. + self.assertEqual(data, [blue]) diff --git a/testbed/django__django/tests/model_formsets/__init__.py b/testbed/django__django/tests/model_formsets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_formsets/models.py b/testbed/django__django/tests/model_formsets/models.py new file mode 100644 index 0000000000000000000000000000000000000000..a2965395d6776c7d3d0dd0a9446fde20d3c57b34 --- /dev/null +++ b/testbed/django__django/tests/model_formsets/models.py @@ -0,0 +1,265 @@ +import datetime +import uuid + +from django.db import models + + +class Author(models.Model): + name = models.CharField(max_length=100) + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + +class BetterAuthor(Author): + write_speed = models.IntegerField() + + +class Book(models.Model): + author = models.ForeignKey(Author, models.CASCADE) + title = 
models.CharField(max_length=100) + + class Meta: + unique_together = (("author", "title"),) + ordering = ["id"] + + def __str__(self): + return self.title + + def clean(self): + # Ensure author is always accessible in clean method + assert self.author.name is not None + + +class BookWithCustomPK(models.Model): + my_pk = models.DecimalField(max_digits=5, decimal_places=0, primary_key=True) + author = models.ForeignKey(Author, models.CASCADE) + title = models.CharField(max_length=100) + + def __str__(self): + return "%s: %s" % (self.my_pk, self.title) + + +class Editor(models.Model): + name = models.CharField(max_length=100) + + +class BookWithOptionalAltEditor(models.Model): + author = models.ForeignKey(Author, models.CASCADE) + # Optional secondary author + alt_editor = models.ForeignKey(Editor, models.SET_NULL, blank=True, null=True) + title = models.CharField(max_length=100) + + class Meta: + unique_together = (("author", "title", "alt_editor"),) + + def __str__(self): + return self.title + + +class AlternateBook(Book): + notes = models.CharField(max_length=100) + + def __str__(self): + return "%s - %s" % (self.title, self.notes) + + +class AuthorMeeting(models.Model): + name = models.CharField(max_length=100) + authors = models.ManyToManyField(Author) + created = models.DateField(editable=False) + + def __str__(self): + return self.name + + +class CustomPrimaryKey(models.Model): + my_pk = models.CharField(max_length=10, primary_key=True) + some_field = models.CharField(max_length=100) + + +# models for inheritance tests. 
+ + +class Place(models.Model): + name = models.CharField(max_length=50) + city = models.CharField(max_length=50) + + def __str__(self): + return self.name + + +class Owner(models.Model): + auto_id = models.AutoField(primary_key=True) + name = models.CharField(max_length=100) + place = models.ForeignKey(Place, models.CASCADE) + + def __str__(self): + return "%s at %s" % (self.name, self.place) + + +class Location(models.Model): + place = models.ForeignKey(Place, models.CASCADE, unique=True) + # this is purely for testing the data doesn't matter here :) + lat = models.CharField(max_length=100) + lon = models.CharField(max_length=100) + + +class OwnerProfile(models.Model): + owner = models.OneToOneField(Owner, models.CASCADE, primary_key=True) + age = models.PositiveIntegerField() + + def __str__(self): + return "%s is %d" % (self.owner.name, self.age) + + +class Restaurant(Place): + serves_pizza = models.BooleanField(default=False) + + +class Product(models.Model): + slug = models.SlugField(unique=True) + + def __str__(self): + return self.slug + + +class Price(models.Model): + price = models.DecimalField(max_digits=10, decimal_places=2) + quantity = models.PositiveIntegerField() + + class Meta: + unique_together = (("price", "quantity"),) + + def __str__(self): + return "%s for %s" % (self.quantity, self.price) + + +class MexicanRestaurant(Restaurant): + serves_tacos = models.BooleanField(default=False) + + +class ClassyMexicanRestaurant(MexicanRestaurant): + the_restaurant = models.OneToOneField( + MexicanRestaurant, models.CASCADE, parent_link=True, primary_key=True + ) + tacos_are_yummy = models.BooleanField(default=False) + + +# models for testing unique_together validation when a fk is involved and +# using inlineformset_factory. 
+class Repository(models.Model): + name = models.CharField(max_length=25) + + def __str__(self): + return self.name + + +class Revision(models.Model): + repository = models.ForeignKey(Repository, models.CASCADE) + revision = models.CharField(max_length=40) + + class Meta: + unique_together = (("repository", "revision"),) + + def __str__(self): + return "%s (%s)" % (self.revision, str(self.repository)) + + +# models for testing callable defaults (see bug #7975). If you define a model +# with a callable default value, you cannot rely on the initial value in a +# form. +class Person(models.Model): + name = models.CharField(max_length=128) + + +class Membership(models.Model): + person = models.ForeignKey(Person, models.CASCADE) + date_joined = models.DateTimeField(default=datetime.datetime.now) + karma = models.IntegerField() + + +# models for testing a null=True fk to a parent +class Team(models.Model): + name = models.CharField(max_length=100) + + +class Player(models.Model): + team = models.ForeignKey(Team, models.SET_NULL, null=True) + name = models.CharField(max_length=100) + + def __str__(self): + return self.name + + +# Models for testing custom ModelForm save methods in formsets and inline formsets +class Poet(models.Model): + name = models.CharField(max_length=100) + + def __str__(self): + return self.name + + +class Poem(models.Model): + poet = models.ForeignKey(Poet, models.CASCADE) + name = models.CharField(max_length=100) + + def __str__(self): + return self.name + + +class Post(models.Model): + title = models.CharField(max_length=50, unique_for_date="posted", blank=True) + slug = models.CharField(max_length=50, unique_for_year="posted", blank=True) + subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True) + posted = models.DateField() + + def __str__(self): + return self.title + + +# Models for testing UUID primary keys +class UUIDPKParent(models.Model): + uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, 
editable=False) + name = models.CharField(max_length=255) + + +class UUIDPKChild(models.Model): + uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + name = models.CharField(max_length=255) + parent = models.ForeignKey(UUIDPKParent, models.CASCADE) + + +class ChildWithEditablePK(models.Model): + name = models.CharField(max_length=255, primary_key=True) + parent = models.ForeignKey(UUIDPKParent, models.CASCADE) + + +class AutoPKChildOfUUIDPKParent(models.Model): + name = models.CharField(max_length=255) + parent = models.ForeignKey(UUIDPKParent, models.CASCADE) + + +class AutoPKParent(models.Model): + name = models.CharField(max_length=255) + + +class UUIDPKChildOfAutoPKParent(models.Model): + uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + name = models.CharField(max_length=255) + parent = models.ForeignKey(AutoPKParent, models.CASCADE) + + +class ParentWithUUIDAlternateKey(models.Model): + uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) + name = models.CharField(max_length=50) + + +class ChildRelatedViaAK(models.Model): + name = models.CharField(max_length=255) + parent = models.ForeignKey( + ParentWithUUIDAlternateKey, models.CASCADE, to_field="uuid" + ) diff --git a/testbed/django__django/tests/model_formsets/test_uuid.py b/testbed/django__django/tests/model_formsets/test_uuid.py new file mode 100644 index 0000000000000000000000000000000000000000..0a2d504c8401ac7119c5919987ea37392f80a029 --- /dev/null +++ b/testbed/django__django/tests/model_formsets/test_uuid.py @@ -0,0 +1,117 @@ +from django.forms.models import inlineformset_factory +from django.test import TestCase + +from .models import ( + AutoPKChildOfUUIDPKParent, + AutoPKParent, + ChildRelatedViaAK, + ChildWithEditablePK, + ParentWithUUIDAlternateKey, + UUIDPKChild, + UUIDPKChildOfAutoPKParent, + UUIDPKParent, +) + + +class InlineFormsetTests(TestCase): + def test_inlineformset_factory_nulls_default_pks(self): + """ + 
#24377 - If we're adding a new object, a parent's auto-generated pk + from the model field default should be ignored as it's regenerated on + the save request. + + Tests the case where both the parent and child have a UUID primary key. + """ + FormSet = inlineformset_factory(UUIDPKParent, UUIDPKChild, fields="__all__") + formset = FormSet() + self.assertIsNone(formset.forms[0].fields["parent"].initial) + + def test_inlineformset_factory_ignores_default_pks_on_submit(self): + """ + #24377 - Inlines with a model field default should ignore that default + value to avoid triggering validation on empty forms. + """ + FormSet = inlineformset_factory(UUIDPKParent, UUIDPKChild, fields="__all__") + formset = FormSet( + { + "uuidpkchild_set-TOTAL_FORMS": 3, + "uuidpkchild_set-INITIAL_FORMS": 0, + "uuidpkchild_set-MAX_NUM_FORMS": "", + "uuidpkchild_set-0-name": "Foo", + "uuidpkchild_set-1-name": "", + "uuidpkchild_set-2-name": "", + } + ) + self.assertTrue(formset.is_valid()) + self.assertIsNone(formset.instance.uuid) + self.assertIsNone(formset.forms[0].instance.parent_id) + + def test_inlineformset_factory_nulls_default_pks_uuid_parent_auto_child(self): + """ + #24958 - Variant of test_inlineformset_factory_nulls_default_pks for + the case of a parent object with a UUID primary key and a child object + with an AutoField primary key. + """ + FormSet = inlineformset_factory( + UUIDPKParent, AutoPKChildOfUUIDPKParent, fields="__all__" + ) + formset = FormSet() + self.assertIsNone(formset.forms[0].fields["parent"].initial) + + def test_inlineformset_factory_nulls_default_pks_auto_parent_uuid_child(self): + """ + #24958 - Variant of test_inlineformset_factory_nulls_default_pks for + the case of a parent object with an AutoField primary key and a child + object with a UUID primary key. 
+ """ + FormSet = inlineformset_factory( + AutoPKParent, UUIDPKChildOfAutoPKParent, fields="__all__" + ) + formset = FormSet() + self.assertIsNone(formset.forms[0].fields["parent"].initial) + + def test_inlineformset_factory_nulls_default_pks_child_editable_pk(self): + """ + #24958 - Variant of test_inlineformset_factory_nulls_default_pks for + the case of a parent object with a UUID primary key and a child + object with an editable natural key for a primary key. + """ + FormSet = inlineformset_factory( + UUIDPKParent, ChildWithEditablePK, fields="__all__" + ) + formset = FormSet() + self.assertIsNone(formset.forms[0].fields["parent"].initial) + + def test_inlineformset_factory_nulls_default_pks_alternate_key_relation(self): + """ + #24958 - Variant of test_inlineformset_factory_nulls_default_pks for + the case of a parent object with a UUID alternate key and a child + object that relates to that alternate key. + """ + FormSet = inlineformset_factory( + ParentWithUUIDAlternateKey, ChildRelatedViaAK, fields="__all__" + ) + formset = FormSet() + self.assertIsNone(formset.forms[0].fields["parent"].initial) + + def test_inlineformset_factory_nulls_default_pks_alternate_key_relation_data(self): + """ + If form data is provided, a parent's auto-generated alternate key is + set. 
+ """ + FormSet = inlineformset_factory( + ParentWithUUIDAlternateKey, ChildRelatedViaAK, fields="__all__" + ) + formset = FormSet( + { + "childrelatedviaak_set-TOTAL_FORMS": 3, + "childrelatedviaak_set-INITIAL_FORMS": 0, + "childrelatedviaak_set-MAX_NUM_FORMS": "", + "childrelatedviaak_set-0-name": "Test", + "childrelatedviaak_set-1-name": "", + "childrelatedviaak_set-2-name": "", + } + ) + self.assertIs(formset.is_valid(), True) + self.assertIsNotNone(formset.instance.uuid) + self.assertEqual(formset.forms[0].instance.parent_id, formset.instance.uuid) diff --git a/testbed/django__django/tests/model_formsets/tests.py b/testbed/django__django/tests/model_formsets/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..598dc57e7a2446b77e49dfdc09fb80e872b4447f --- /dev/null +++ b/testbed/django__django/tests/model_formsets/tests.py @@ -0,0 +1,2367 @@ +import datetime +import re +from datetime import date +from decimal import Decimal + +from django import forms +from django.core.exceptions import ImproperlyConfigured +from django.db import models +from django.forms.formsets import formset_factory +from django.forms.models import ( + BaseModelFormSet, + _get_foreign_key, + inlineformset_factory, + modelformset_factory, +) +from django.http import QueryDict +from django.test import TestCase, skipUnlessDBFeature + +from .models import ( + AlternateBook, + Author, + AuthorMeeting, + BetterAuthor, + Book, + BookWithCustomPK, + BookWithOptionalAltEditor, + ClassyMexicanRestaurant, + CustomPrimaryKey, + Location, + Membership, + MexicanRestaurant, + Owner, + OwnerProfile, + Person, + Place, + Player, + Poem, + Poet, + Post, + Price, + Product, + Repository, + Restaurant, + Revision, + Team, +) + + +class DeletionTests(TestCase): + def test_deletion(self): + PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True) + poet = Poet.objects.create(name="test") + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "1", + 
"form-MAX_NUM_FORMS": "0", + "form-0-id": str(poet.pk), + "form-0-name": "test", + "form-0-DELETE": "on", + } + formset = PoetFormSet(data, queryset=Poet.objects.all()) + formset.save(commit=False) + self.assertEqual(Poet.objects.count(), 1) + + formset.save() + self.assertTrue(formset.is_valid()) + self.assertEqual(Poet.objects.count(), 0) + + def test_add_form_deletion_when_invalid(self): + """ + Make sure that an add form that is filled out, but marked for deletion + doesn't cause validation errors. + """ + PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True) + poet = Poet.objects.create(name="test") + # One existing untouched and two new unvalid forms + data = { + "form-TOTAL_FORMS": "3", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "0", + "form-0-id": str(poet.id), + "form-0-name": "test", + "form-1-id": "", + "form-1-name": "x" * 1000, # Too long + "form-2-id": str(poet.id), # Violate unique constraint + "form-2-name": "test2", + } + formset = PoetFormSet(data, queryset=Poet.objects.all()) + # Make sure this form doesn't pass validation. + self.assertIs(formset.is_valid(), False) + self.assertEqual(Poet.objects.count(), 1) + + # Then make sure that it *does* pass validation and delete the object, + # even though the data in new forms aren't actually valid. + data["form-0-DELETE"] = "on" + data["form-1-DELETE"] = "on" + data["form-2-DELETE"] = "on" + formset = PoetFormSet(data, queryset=Poet.objects.all()) + self.assertIs(formset.is_valid(), True) + formset.save() + self.assertEqual(Poet.objects.count(), 0) + + def test_change_form_deletion_when_invalid(self): + """ + Make sure that a change form that is filled out, but marked for deletion + doesn't cause validation errors. 
+ """ + PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True) + poet = Poet.objects.create(name="test") + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "0", + "form-0-id": str(poet.id), + "form-0-name": "x" * 1000, + } + formset = PoetFormSet(data, queryset=Poet.objects.all()) + # Make sure this form doesn't pass validation. + self.assertIs(formset.is_valid(), False) + self.assertEqual(Poet.objects.count(), 1) + + # Then make sure that it *does* pass validation and delete the object, + # even though the data isn't actually valid. + data["form-0-DELETE"] = "on" + formset = PoetFormSet(data, queryset=Poet.objects.all()) + self.assertIs(formset.is_valid(), True) + formset.save() + self.assertEqual(Poet.objects.count(), 0) + + def test_outdated_deletion(self): + poet = Poet.objects.create(name="test") + poem = Poem.objects.create(name="Brevity is the soul of wit", poet=poet) + + PoemFormSet = inlineformset_factory( + Poet, Poem, fields="__all__", can_delete=True + ) + + # Simulate deletion of an object that doesn't exist in the database + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "2", + "form-0-id": str(poem.pk), + "form-0-name": "foo", + "form-1-id": str(poem.pk + 1), # doesn't exist + "form-1-name": "bar", + "form-1-DELETE": "on", + } + formset = PoemFormSet(data, instance=poet, prefix="form") + + # The formset is valid even though poem.pk + 1 doesn't exist, + # because it's marked for deletion anyway + self.assertTrue(formset.is_valid()) + + formset.save() + + # Make sure the save went through correctly + self.assertEqual(Poem.objects.get(pk=poem.pk).name, "foo") + self.assertEqual(poet.poem_set.count(), 1) + self.assertFalse(Poem.objects.filter(pk=poem.pk + 1).exists()) + + +class ModelFormsetTest(TestCase): + def test_modelformset_factory_without_fields(self): + """Regression for #19733""" + message = ( + "Calling modelformset_factory without defining 'fields' or 'exclude' " + 
"explicitly is prohibited." + ) + with self.assertRaisesMessage(ImproperlyConfigured, message): + modelformset_factory(Author) + + def test_simple_save(self): + qs = Author.objects.all() + AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=3) + + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 3) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '' + '

    ', + ) + + data = { + "form-TOTAL_FORMS": "3", # the number of forms rendered + "form-INITIAL_FORMS": "0", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-name": "Charles Baudelaire", + "form-1-name": "Arthur Rimbaud", + "form-2-name": "", + } + + formset = AuthorFormSet(data=data, queryset=qs) + self.assertTrue(formset.is_valid()) + + saved = formset.save() + self.assertEqual(len(saved), 2) + author1, author2 = saved + self.assertEqual(author1, Author.objects.get(name="Charles Baudelaire")) + self.assertEqual(author2, Author.objects.get(name="Arthur Rimbaud")) + + authors = list(Author.objects.order_by("name")) + self.assertEqual(authors, [author2, author1]) + + # Gah! We forgot Paul Verlaine. Let's create a formset to edit the + # existing authors with an extra form to add him. We *could* pass in a + # queryset to restrict the Author objects we edit, but in this case + # we'll use it to display them in alphabetical order by name. + + qs = Author.objects.order_by("name") + AuthorFormSet = modelformset_factory( + Author, fields="__all__", extra=1, can_delete=False + ) + + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 3) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '

    ' + % author2.id, + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '

    ' + % author1.id, + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '' + '

    ', + ) + + data = { + "form-TOTAL_FORMS": "3", # the number of forms rendered + "form-INITIAL_FORMS": "2", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-id": str(author2.id), + "form-0-name": "Arthur Rimbaud", + "form-1-id": str(author1.id), + "form-1-name": "Charles Baudelaire", + "form-2-name": "Paul Verlaine", + } + + formset = AuthorFormSet(data=data, queryset=qs) + self.assertTrue(formset.is_valid()) + + # Only changed or new objects are returned from formset.save() + saved = formset.save() + self.assertEqual(len(saved), 1) + author3 = saved[0] + self.assertEqual(author3, Author.objects.get(name="Paul Verlaine")) + + authors = list(Author.objects.order_by("name")) + self.assertEqual(authors, [author2, author1, author3]) + + # This probably shouldn't happen, but it will. If an add form was + # marked for deletion, make sure we don't save that form. + + qs = Author.objects.order_by("name") + AuthorFormSet = modelformset_factory( + Author, fields="__all__", extra=1, can_delete=True + ) + + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 4) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '

    ' + '

    ' + '' + '

    ' + % author2.id, + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '

    ' + '

    ' + '' + '

    ' + % author1.id, + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '

    ' + '

    ' + '' + '

    ' + % author3.id, + ) + self.assertHTMLEqual( + formset.forms[3].as_p(), + '

    ' + '' + '

    ' + '' + '

    ', + ) + + data = { + "form-TOTAL_FORMS": "4", # the number of forms rendered + "form-INITIAL_FORMS": "3", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-id": str(author2.id), + "form-0-name": "Arthur Rimbaud", + "form-1-id": str(author1.id), + "form-1-name": "Charles Baudelaire", + "form-2-id": str(author3.id), + "form-2-name": "Paul Verlaine", + "form-3-name": "Walt Whitman", + "form-3-DELETE": "on", + } + + formset = AuthorFormSet(data=data, queryset=qs) + self.assertTrue(formset.is_valid()) + + # No objects were changed or saved so nothing will come back. + + self.assertEqual(formset.save(), []) + + authors = list(Author.objects.order_by("name")) + self.assertEqual(authors, [author2, author1, author3]) + + # Let's edit a record to ensure save only returns that one record. + + data = { + "form-TOTAL_FORMS": "4", # the number of forms rendered + "form-INITIAL_FORMS": "3", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-id": str(author2.id), + "form-0-name": "Walt Whitman", + "form-1-id": str(author1.id), + "form-1-name": "Charles Baudelaire", + "form-2-id": str(author3.id), + "form-2-name": "Paul Verlaine", + "form-3-name": "", + "form-3-DELETE": "", + } + + formset = AuthorFormSet(data=data, queryset=qs) + self.assertTrue(formset.is_valid()) + + # One record has changed. + + saved = formset.save() + self.assertEqual(len(saved), 1) + self.assertEqual(saved[0], Author.objects.get(name="Walt Whitman")) + + def test_commit_false(self): + # Test the behavior of commit=False and save_m2m + + author1 = Author.objects.create(name="Charles Baudelaire") + author2 = Author.objects.create(name="Paul Verlaine") + author3 = Author.objects.create(name="Walt Whitman") + + meeting = AuthorMeeting.objects.create(created=date.today()) + meeting.authors.set(Author.objects.all()) + + # create an Author instance to add to the meeting. 
+ + author4 = Author.objects.create(name="John Steinbeck") + + AuthorMeetingFormSet = modelformset_factory( + AuthorMeeting, fields="__all__", extra=1, can_delete=True + ) + data = { + "form-TOTAL_FORMS": "2", # the number of forms rendered + "form-INITIAL_FORMS": "1", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-id": str(meeting.id), + "form-0-name": "2nd Tuesday of the Week Meeting", + "form-0-authors": [author2.id, author1.id, author3.id, author4.id], + "form-1-name": "", + "form-1-authors": "", + "form-1-DELETE": "", + } + formset = AuthorMeetingFormSet(data=data, queryset=AuthorMeeting.objects.all()) + self.assertTrue(formset.is_valid()) + + instances = formset.save(commit=False) + for instance in instances: + instance.created = date.today() + instance.save() + formset.save_m2m() + self.assertSequenceEqual( + instances[0].authors.all(), + [author1, author4, author2, author3], + ) + + def test_max_num(self): + # Test the behavior of max_num with model formsets. It should allow + # all existing related objects/inlines for a given object to be + # displayed, but not allow the creation of new inlines beyond max_num. 
+ + a1 = Author.objects.create(name="Charles Baudelaire") + a2 = Author.objects.create(name="Paul Verlaine") + a3 = Author.objects.create(name="Walt Whitman") + + qs = Author.objects.order_by("name") + + AuthorFormSet = modelformset_factory( + Author, fields="__all__", max_num=None, extra=3 + ) + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 6) + self.assertEqual(len(formset.extra_forms), 3) + + AuthorFormSet = modelformset_factory( + Author, fields="__all__", max_num=4, extra=3 + ) + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 4) + self.assertEqual(len(formset.extra_forms), 1) + + AuthorFormSet = modelformset_factory( + Author, fields="__all__", max_num=0, extra=3 + ) + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 3) + self.assertEqual(len(formset.extra_forms), 0) + + AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None) + formset = AuthorFormSet(queryset=qs) + self.assertSequenceEqual(formset.get_queryset(), [a1, a2, a3]) + + AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0) + formset = AuthorFormSet(queryset=qs) + self.assertSequenceEqual(formset.get_queryset(), [a1, a2, a3]) + + AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4) + formset = AuthorFormSet(queryset=qs) + self.assertSequenceEqual(formset.get_queryset(), [a1, a2, a3]) + + def test_min_num(self): + # Test the behavior of min_num with model formsets. It should be + # added to extra. 
+ qs = Author.objects.none() + + AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=0) + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 0) + + AuthorFormSet = modelformset_factory( + Author, fields="__all__", min_num=1, extra=0 + ) + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 1) + + AuthorFormSet = modelformset_factory( + Author, fields="__all__", min_num=1, extra=1 + ) + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 2) + + def test_min_num_with_existing(self): + # Test the behavior of min_num with existing objects. + Author.objects.create(name="Charles Baudelaire") + qs = Author.objects.all() + + AuthorFormSet = modelformset_factory( + Author, fields="__all__", extra=0, min_num=1 + ) + formset = AuthorFormSet(queryset=qs) + self.assertEqual(len(formset.forms), 1) + + def test_custom_save_method(self): + class PoetForm(forms.ModelForm): + def save(self, commit=True): + # change the name to "Vladimir Mayakovsky" just to be a jerk. + author = super().save(commit=False) + author.name = "Vladimir Mayakovsky" + if commit: + author.save() + return author + + PoetFormSet = modelformset_factory(Poet, fields="__all__", form=PoetForm) + + data = { + "form-TOTAL_FORMS": "3", # the number of forms rendered + "form-INITIAL_FORMS": "0", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-name": "Walt Whitman", + "form-1-name": "Charles Baudelaire", + "form-2-name": "", + } + + qs = Poet.objects.all() + formset = PoetFormSet(data=data, queryset=qs) + self.assertTrue(formset.is_valid()) + + poets = formset.save() + self.assertEqual(len(poets), 2) + poet1, poet2 = poets + self.assertEqual(poet1.name, "Vladimir Mayakovsky") + self.assertEqual(poet2.name, "Vladimir Mayakovsky") + + def test_custom_form(self): + """ + model_formset_factory() respects fields and exclude parameters of a + custom form. 
+ """ + + class PostForm1(forms.ModelForm): + class Meta: + model = Post + fields = ("title", "posted") + + class PostForm2(forms.ModelForm): + class Meta: + model = Post + exclude = ("subtitle",) + + PostFormSet = modelformset_factory(Post, form=PostForm1) + formset = PostFormSet() + self.assertNotIn("subtitle", formset.forms[0].fields) + + PostFormSet = modelformset_factory(Post, form=PostForm2) + formset = PostFormSet() + self.assertNotIn("subtitle", formset.forms[0].fields) + + def test_custom_queryset_init(self): + """ + A queryset can be overridden in the formset's __init__() method. + """ + Author.objects.create(name="Charles Baudelaire") + Author.objects.create(name="Paul Verlaine") + + class BaseAuthorFormSet(BaseModelFormSet): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.queryset = Author.objects.filter(name__startswith="Charles") + + AuthorFormSet = modelformset_factory( + Author, fields="__all__", formset=BaseAuthorFormSet + ) + formset = AuthorFormSet() + self.assertEqual(len(formset.get_queryset()), 1) + + def test_model_inheritance(self): + BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields="__all__") + formset = BetterAuthorFormSet() + self.assertEqual(len(formset.forms), 1) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '

    ' + '' + '' + "

    ", + ) + + data = { + "form-TOTAL_FORMS": "1", # the number of forms rendered + "form-INITIAL_FORMS": "0", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-author_ptr": "", + "form-0-name": "Ernest Hemingway", + "form-0-write_speed": "10", + } + + formset = BetterAuthorFormSet(data) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (author1,) = saved + self.assertEqual(author1, BetterAuthor.objects.get(name="Ernest Hemingway")) + hemingway_id = BetterAuthor.objects.get(name="Ernest Hemingway").pk + + formset = BetterAuthorFormSet() + self.assertEqual(len(formset.forms), 2) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '

    ' + '

    ' + '' + '

    ' % hemingway_id, + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '

    ' + '' + '' + "

    ", + ) + + data = { + "form-TOTAL_FORMS": "2", # the number of forms rendered + "form-INITIAL_FORMS": "1", # the number of forms with initial data + "form-MAX_NUM_FORMS": "", # the max number of forms + "form-0-author_ptr": hemingway_id, + "form-0-name": "Ernest Hemingway", + "form-0-write_speed": "10", + "form-1-author_ptr": "", + "form-1-name": "", + "form-1-write_speed": "", + } + + formset = BetterAuthorFormSet(data) + self.assertTrue(formset.is_valid()) + self.assertEqual(formset.save(), []) + + def test_inline_formsets(self): + # We can also create a formset that is tied to a parent model. This is + # how the admin system's edit inline functionality works. + + AuthorBooksFormSet = inlineformset_factory( + Author, Book, can_delete=False, extra=3, fields="__all__" + ) + author = Author.objects.create(name="Charles Baudelaire") + + formset = AuthorBooksFormSet(instance=author) + self.assertEqual(len(formset.forms), 3) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '' + '' + "

    " % author.id, + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '' + '

    ' + % author.id, + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '' + '' + '

    ' + % author.id, + ) + + data = { + "book_set-TOTAL_FORMS": "3", # the number of forms rendered + "book_set-INITIAL_FORMS": "0", # the number of forms with initial data + "book_set-MAX_NUM_FORMS": "", # the max number of forms + "book_set-0-title": "Les Fleurs du Mal", + "book_set-1-title": "", + "book_set-2-title": "", + } + + formset = AuthorBooksFormSet(data, instance=author) + self.assertTrue(formset.is_valid()) + + saved = formset.save() + self.assertEqual(len(saved), 1) + (book1,) = saved + self.assertEqual(book1, Book.objects.get(title="Les Fleurs du Mal")) + self.assertSequenceEqual(author.book_set.all(), [book1]) + + # Now that we've added a book to Charles Baudelaire, let's try adding + # another one. This time though, an edit form will be available for + # every existing book. + + AuthorBooksFormSet = inlineformset_factory( + Author, Book, can_delete=False, extra=2, fields="__all__" + ) + author = Author.objects.get(name="Charles Baudelaire") + + formset = AuthorBooksFormSet(instance=author) + self.assertEqual(len(formset.forms), 3) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '' + '

    ' + % ( + author.id, + book1.id, + ), + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '' + '

    ' + % author.id, + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '' + '' + '

    ' + % author.id, + ) + + data = { + "book_set-TOTAL_FORMS": "3", # the number of forms rendered + "book_set-INITIAL_FORMS": "1", # the number of forms with initial data + "book_set-MAX_NUM_FORMS": "", # the max number of forms + "book_set-0-id": str(book1.id), + "book_set-0-title": "Les Fleurs du Mal", + "book_set-1-title": "Les Paradis Artificiels", + "book_set-2-title": "", + } + + formset = AuthorBooksFormSet(data, instance=author) + self.assertTrue(formset.is_valid()) + + saved = formset.save() + self.assertEqual(len(saved), 1) + (book2,) = saved + self.assertEqual(book2, Book.objects.get(title="Les Paradis Artificiels")) + + # As you can see, 'Les Paradis Artificiels' is now a book belonging to + # Charles Baudelaire. + self.assertSequenceEqual(author.book_set.order_by("title"), [book1, book2]) + + def test_inline_formsets_save_as_new(self): + # The save_as_new parameter lets you re-associate the data to a new + # instance. This is used in the admin for save_as functionality. + AuthorBooksFormSet = inlineformset_factory( + Author, Book, can_delete=False, extra=2, fields="__all__" + ) + Author.objects.create(name="Charles Baudelaire") + + # An immutable QueryDict simulates request.POST. 
+ data = QueryDict(mutable=True) + data.update( + { + "book_set-TOTAL_FORMS": "3", # the number of forms rendered + "book_set-INITIAL_FORMS": "2", # the number of forms with initial data + "book_set-MAX_NUM_FORMS": "", # the max number of forms + "book_set-0-id": "1", + "book_set-0-title": "Les Fleurs du Mal", + "book_set-1-id": "2", + "book_set-1-title": "Les Paradis Artificiels", + "book_set-2-title": "", + } + ) + data._mutable = False + + formset = AuthorBooksFormSet(data, instance=Author(), save_as_new=True) + self.assertTrue(formset.is_valid()) + self.assertIs(data._mutable, False) + + new_author = Author.objects.create(name="Charles Baudelaire") + formset = AuthorBooksFormSet(data, instance=new_author, save_as_new=True) + saved = formset.save() + self.assertEqual(len(saved), 2) + book1, book2 = saved + self.assertEqual(book1.title, "Les Fleurs du Mal") + self.assertEqual(book2.title, "Les Paradis Artificiels") + + # Test using a custom prefix on an inline formset. + + formset = AuthorBooksFormSet(prefix="test") + self.assertEqual(len(formset.forms), 2) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '' + '

    ', + ) + + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '' + '

    ', + ) + + def test_inline_formsets_with_custom_pk(self): + # Test inline formsets where the inline-edited object has a custom + # primary key that is not the fk to the parent object. + self.maxDiff = 1024 + + AuthorBooksFormSet2 = inlineformset_factory( + Author, BookWithCustomPK, can_delete=False, extra=1, fields="__all__" + ) + author = Author.objects.create(pk=1, name="Charles Baudelaire") + + formset = AuthorBooksFormSet2(instance=author) + self.assertEqual(len(formset.forms), 1) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '

    ' + '

    ' + '' + '

    ', + ) + + data = { + # The number of forms rendered. + "bookwithcustompk_set-TOTAL_FORMS": "1", + # The number of forms with initial data. + "bookwithcustompk_set-INITIAL_FORMS": "0", + # The max number of forms. + "bookwithcustompk_set-MAX_NUM_FORMS": "", + "bookwithcustompk_set-0-my_pk": "77777", + "bookwithcustompk_set-0-title": "Les Fleurs du Mal", + } + + formset = AuthorBooksFormSet2(data, instance=author) + self.assertTrue(formset.is_valid()) + + saved = formset.save() + self.assertEqual(len(saved), 1) + (book1,) = saved + self.assertEqual(book1.pk, 77777) + + book1 = author.bookwithcustompk_set.get() + self.assertEqual(book1.title, "Les Fleurs du Mal") + + def test_inline_formsets_with_multi_table_inheritance(self): + # Test inline formsets where the inline-edited object uses multi-table + # inheritance, thus has a non AutoField yet auto-created primary key. + + AuthorBooksFormSet3 = inlineformset_factory( + Author, AlternateBook, can_delete=False, extra=1, fields="__all__" + ) + author = Author.objects.create(pk=1, name="Charles Baudelaire") + + formset = AuthorBooksFormSet3(instance=author) + self.assertEqual(len(formset.forms), 1) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '

    ' + '

    ' + '' + '' + '

    ', + ) + + data = { + # The number of forms rendered. + "alternatebook_set-TOTAL_FORMS": "1", + # The number of forms with initial data. + "alternatebook_set-INITIAL_FORMS": "0", + # The max number of forms. + "alternatebook_set-MAX_NUM_FORMS": "", + "alternatebook_set-0-title": "Flowers of Evil", + "alternatebook_set-0-notes": "English translation of Les Fleurs du Mal", + } + + formset = AuthorBooksFormSet3(data, instance=author) + self.assertTrue(formset.is_valid()) + + saved = formset.save() + self.assertEqual(len(saved), 1) + (book1,) = saved + self.assertEqual(book1.title, "Flowers of Evil") + self.assertEqual(book1.notes, "English translation of Les Fleurs du Mal") + + @skipUnlessDBFeature("supports_partially_nullable_unique_constraints") + def test_inline_formsets_with_nullable_unique_together(self): + # Test inline formsets where the inline-edited object has a + # unique_together constraint with a nullable member + + AuthorBooksFormSet4 = inlineformset_factory( + Author, + BookWithOptionalAltEditor, + can_delete=False, + extra=2, + fields="__all__", + ) + author = Author.objects.create(pk=1, name="Charles Baudelaire") + + data = { + # The number of forms rendered. + "bookwithoptionalalteditor_set-TOTAL_FORMS": "2", + # The number of forms with initial data. + "bookwithoptionalalteditor_set-INITIAL_FORMS": "0", + # The max number of forms. 
+ "bookwithoptionalalteditor_set-MAX_NUM_FORMS": "", + "bookwithoptionalalteditor_set-0-author": "1", + "bookwithoptionalalteditor_set-0-title": "Les Fleurs du Mal", + "bookwithoptionalalteditor_set-1-author": "1", + "bookwithoptionalalteditor_set-1-title": "Les Fleurs du Mal", + } + formset = AuthorBooksFormSet4(data, instance=author) + self.assertTrue(formset.is_valid()) + + saved = formset.save() + self.assertEqual(len(saved), 2) + book1, book2 = saved + self.assertEqual(book1.author_id, 1) + self.assertEqual(book1.title, "Les Fleurs du Mal") + self.assertEqual(book2.author_id, 1) + self.assertEqual(book2.title, "Les Fleurs du Mal") + + def test_inline_formsets_with_custom_save_method(self): + AuthorBooksFormSet = inlineformset_factory( + Author, Book, can_delete=False, extra=2, fields="__all__" + ) + author = Author.objects.create(pk=1, name="Charles Baudelaire") + book1 = Book.objects.create( + pk=1, author=author, title="Les Paradis Artificiels" + ) + book2 = Book.objects.create(pk=2, author=author, title="Les Fleurs du Mal") + book3 = Book.objects.create(pk=3, author=author, title="Flowers of Evil") + + class PoemForm(forms.ModelForm): + def save(self, commit=True): + # change the name to "Brooklyn Bridge" just to be a jerk. 
+ poem = super().save(commit=False) + poem.name = "Brooklyn Bridge" + if commit: + poem.save() + return poem + + PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm, fields="__all__") + + data = { + "poem_set-TOTAL_FORMS": "3", # the number of forms rendered + "poem_set-INITIAL_FORMS": "0", # the number of forms with initial data + "poem_set-MAX_NUM_FORMS": "", # the max number of forms + "poem_set-0-name": "The Cloud in Trousers", + "poem_set-1-name": "I", + "poem_set-2-name": "", + } + + poet = Poet.objects.create(name="Vladimir Mayakovsky") + formset = PoemFormSet(data=data, instance=poet) + self.assertTrue(formset.is_valid()) + + saved = formset.save() + self.assertEqual(len(saved), 2) + poem1, poem2 = saved + self.assertEqual(poem1.name, "Brooklyn Bridge") + self.assertEqual(poem2.name, "Brooklyn Bridge") + + # We can provide a custom queryset to our InlineFormSet: + + custom_qs = Book.objects.order_by("-title") + formset = AuthorBooksFormSet(instance=author, queryset=custom_qs) + self.assertEqual(len(formset.forms), 5) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '' + '' + "

    ", + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '' + '' + "

    ", + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[3].as_p(), + '

    ' + '' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[4].as_p(), + '

    ' + '' + '' + '

    ', + ) + + data = { + "book_set-TOTAL_FORMS": "5", # the number of forms rendered + "book_set-INITIAL_FORMS": "3", # the number of forms with initial data + "book_set-MAX_NUM_FORMS": "", # the max number of forms + "book_set-0-id": str(book1.id), + "book_set-0-title": "Les Paradis Artificiels", + "book_set-1-id": str(book2.id), + "book_set-1-title": "Les Fleurs du Mal", + "book_set-2-id": str(book3.id), + "book_set-2-title": "Flowers of Evil", + "book_set-3-title": "Revue des deux mondes", + "book_set-4-title": "", + } + formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs) + self.assertTrue(formset.is_valid()) + + custom_qs = Book.objects.filter(title__startswith="F") + formset = AuthorBooksFormSet(instance=author, queryset=custom_qs) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '' + '' + '

    ', + ) + + data = { + "book_set-TOTAL_FORMS": "3", # the number of forms rendered + "book_set-INITIAL_FORMS": "1", # the number of forms with initial data + "book_set-MAX_NUM_FORMS": "", # the max number of forms + "book_set-0-id": str(book3.id), + "book_set-0-title": "Flowers of Evil", + "book_set-1-title": "Revue des deux mondes", + "book_set-2-title": "", + } + formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs) + self.assertTrue(formset.is_valid()) + + def test_inline_formsets_with_custom_save_method_related_instance(self): + """ + The ModelForm.save() method should be able to access the related object + if it exists in the database (#24395). + """ + + class PoemForm2(forms.ModelForm): + def save(self, commit=True): + poem = super().save(commit=False) + poem.name = "%s by %s" % (poem.name, poem.poet.name) + if commit: + poem.save() + return poem + + PoemFormSet = inlineformset_factory( + Poet, Poem, form=PoemForm2, fields="__all__" + ) + data = { + "poem_set-TOTAL_FORMS": "1", + "poem_set-INITIAL_FORMS": "0", + "poem_set-MAX_NUM_FORMS": "", + "poem_set-0-name": "Le Lac", + } + poet = Poet() + formset = PoemFormSet(data=data, instance=poet) + self.assertTrue(formset.is_valid()) + + # The Poet instance is saved after the formset instantiation. This + # happens in admin's changeform_view() when adding a new object and + # some inlines in the same request. + poet.name = "Lamartine" + poet.save() + poem = formset.save()[0] + self.assertEqual(poem.name, "Le Lac by Lamartine") + + def test_inline_formsets_with_wrong_fk_name(self): + """Regression for #23451""" + message = "fk_name 'title' is not a ForeignKey to 'model_formsets.Author'." 
+ with self.assertRaisesMessage(ValueError, message): + inlineformset_factory(Author, Book, fields="__all__", fk_name="title") + + def test_custom_pk(self): + # We need to ensure that it is displayed + + CustomPrimaryKeyFormSet = modelformset_factory( + CustomPrimaryKey, fields="__all__" + ) + formset = CustomPrimaryKeyFormSet() + self.assertEqual(len(formset.forms), 1) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '

    ' + '

    ' + '

    ', + ) + + # Custom primary keys with ForeignKey, OneToOneField and AutoField ############ + + place = Place.objects.create(pk=1, name="Giordanos", city="Chicago") + + FormSet = inlineformset_factory( + Place, Owner, extra=2, can_delete=False, fields="__all__" + ) + formset = FormSet(instance=place) + self.assertEqual(len(formset.forms), 2) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '' + '

    ', + ) + + data = { + "owner_set-TOTAL_FORMS": "2", + "owner_set-INITIAL_FORMS": "0", + "owner_set-MAX_NUM_FORMS": "", + "owner_set-0-auto_id": "", + "owner_set-0-name": "Joe Perry", + "owner_set-1-auto_id": "", + "owner_set-1-name": "", + } + formset = FormSet(data, instance=place) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (owner1,) = saved + self.assertEqual(owner1.name, "Joe Perry") + self.assertEqual(owner1.place.name, "Giordanos") + + formset = FormSet(instance=place) + self.assertEqual(len(formset.forms), 3) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '' + '

    ' % owner1.auto_id, + ) + self.assertHTMLEqual( + formset.forms[1].as_p(), + '

    ' + '' + '' + '

    ', + ) + self.assertHTMLEqual( + formset.forms[2].as_p(), + '

    ' + '' + '' + '

    ', + ) + + data = { + "owner_set-TOTAL_FORMS": "3", + "owner_set-INITIAL_FORMS": "1", + "owner_set-MAX_NUM_FORMS": "", + "owner_set-0-auto_id": str(owner1.auto_id), + "owner_set-0-name": "Joe Perry", + "owner_set-1-auto_id": "", + "owner_set-1-name": "Jack Berry", + "owner_set-2-auto_id": "", + "owner_set-2-name": "", + } + formset = FormSet(data, instance=place) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (owner2,) = saved + self.assertEqual(owner2.name, "Jack Berry") + self.assertEqual(owner2.place.name, "Giordanos") + + # A custom primary key that is a ForeignKey or OneToOneField get + # rendered for the user to choose. + FormSet = modelformset_factory(OwnerProfile, fields="__all__") + formset = FormSet() + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '

    " + '

    ' + '

    ' + % (owner1.auto_id, owner2.auto_id), + ) + + owner1 = Owner.objects.get(name="Joe Perry") + FormSet = inlineformset_factory( + Owner, OwnerProfile, max_num=1, can_delete=False, fields="__all__" + ) + self.assertEqual(FormSet.max_num, 1) + + formset = FormSet(instance=owner1) + self.assertEqual(len(formset.forms), 1) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '

    ' % owner1.auto_id, + ) + + data = { + "ownerprofile-TOTAL_FORMS": "1", + "ownerprofile-INITIAL_FORMS": "0", + "ownerprofile-MAX_NUM_FORMS": "1", + "ownerprofile-0-owner": "", + "ownerprofile-0-age": "54", + } + formset = FormSet(data, instance=owner1) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (profile1,) = saved + self.assertEqual(profile1.owner, owner1) + self.assertEqual(profile1.age, 54) + + formset = FormSet(instance=owner1) + self.assertEqual(len(formset.forms), 1) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '' + '

    ' % owner1.auto_id, + ) + + data = { + "ownerprofile-TOTAL_FORMS": "1", + "ownerprofile-INITIAL_FORMS": "1", + "ownerprofile-MAX_NUM_FORMS": "1", + "ownerprofile-0-owner": str(owner1.auto_id), + "ownerprofile-0-age": "55", + } + formset = FormSet(data, instance=owner1) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (profile1,) = saved + self.assertEqual(profile1.owner, owner1) + self.assertEqual(profile1.age, 55) + + def test_unique_true_enforces_max_num_one(self): + # ForeignKey with unique=True should enforce max_num=1 + + place = Place.objects.create(pk=1, name="Giordanos", city="Chicago") + + FormSet = inlineformset_factory( + Place, Location, can_delete=False, fields="__all__" + ) + self.assertEqual(FormSet.max_num, 1) + + formset = FormSet(instance=place) + self.assertEqual(len(formset.forms), 1) + self.assertHTMLEqual( + formset.forms[0].as_p(), + '

    ' + '

    ' + '

    ' + '' + '' + '

    ', + ) + + def test_foreign_keys_in_parents(self): + self.assertEqual(type(_get_foreign_key(Restaurant, Owner)), models.ForeignKey) + self.assertEqual( + type(_get_foreign_key(MexicanRestaurant, Owner)), models.ForeignKey + ) + + def test_unique_validation(self): + FormSet = modelformset_factory(Product, fields="__all__", extra=1) + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-slug": "car-red", + } + formset = FormSet(data) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (product1,) = saved + self.assertEqual(product1.slug, "car-red") + + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-slug": "car-red", + } + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset.errors, [{"slug": ["Product with this Slug already exists."]}] + ) + + def test_modelformset_validate_max_flag(self): + # If validate_max is set and max_num is less than TOTAL_FORMS in the + # data, then throw an exception. 
MAX_NUM_FORMS in the data is + # irrelevant here (it's output as a hint for the client but its + # value in the returned data is not checked) + + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "2", # should be ignored + "form-0-price": "12.00", + "form-0-quantity": "1", + "form-1-price": "24.00", + "form-1-quantity": "2", + } + + FormSet = modelformset_factory( + Price, fields="__all__", extra=1, max_num=1, validate_max=True + ) + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual(formset.non_form_errors(), ["Please submit at most 1 form."]) + + # Now test the same thing without the validate_max flag to ensure + # default behavior is unchanged + FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1) + formset = FormSet(data) + self.assertTrue(formset.is_valid()) + + def test_modelformset_min_num_equals_max_num_less_than(self): + data = { + "form-TOTAL_FORMS": "3", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "2", + "form-0-slug": "car-red", + "form-1-slug": "car-blue", + "form-2-slug": "car-black", + } + FormSet = modelformset_factory( + Product, + fields="__all__", + extra=1, + max_num=2, + validate_max=True, + min_num=2, + validate_min=True, + ) + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual(formset.non_form_errors(), ["Please submit at most 2 forms."]) + + def test_modelformset_min_num_equals_max_num_more_than(self): + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "2", + "form-0-slug": "car-red", + } + FormSet = modelformset_factory( + Product, + fields="__all__", + extra=1, + max_num=2, + validate_max=True, + min_num=2, + validate_min=True, + ) + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual(formset.non_form_errors(), ["Please submit at least 2 forms."]) + + def test_unique_together_validation(self): + FormSet = modelformset_factory(Price, 
fields="__all__", extra=1) + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-price": "12.00", + "form-0-quantity": "1", + } + formset = FormSet(data) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (price1,) = saved + self.assertEqual(price1.price, Decimal("12.00")) + self.assertEqual(price1.quantity, 1) + + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-price": "12.00", + "form-0-quantity": "1", + } + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset.errors, + [{"__all__": ["Price with this Price and Quantity already exists."]}], + ) + + def test_unique_together_with_inlineformset_factory(self): + # Also see bug #8882. + + repository = Repository.objects.create(name="Test Repo") + FormSet = inlineformset_factory(Repository, Revision, extra=1, fields="__all__") + data = { + "revision_set-TOTAL_FORMS": "1", + "revision_set-INITIAL_FORMS": "0", + "revision_set-MAX_NUM_FORMS": "", + "revision_set-0-repository": repository.pk, + "revision_set-0-revision": "146239817507f148d448db38840db7c3cbf47c76", + "revision_set-0-DELETE": "", + } + formset = FormSet(data, instance=repository) + self.assertTrue(formset.is_valid()) + saved = formset.save() + self.assertEqual(len(saved), 1) + (revision1,) = saved + self.assertEqual(revision1.repository, repository) + self.assertEqual(revision1.revision, "146239817507f148d448db38840db7c3cbf47c76") + + # attempt to save the same revision against the same repo. 
+ data = { + "revision_set-TOTAL_FORMS": "1", + "revision_set-INITIAL_FORMS": "0", + "revision_set-MAX_NUM_FORMS": "", + "revision_set-0-repository": repository.pk, + "revision_set-0-revision": "146239817507f148d448db38840db7c3cbf47c76", + "revision_set-0-DELETE": "", + } + formset = FormSet(data, instance=repository) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset.errors, + [ + { + "__all__": [ + "Revision with this Repository and Revision already exists." + ] + } + ], + ) + + # unique_together with inlineformset_factory with overridden form fields + # Also see #9494 + + FormSet = inlineformset_factory( + Repository, Revision, fields=("revision",), extra=1 + ) + data = { + "revision_set-TOTAL_FORMS": "1", + "revision_set-INITIAL_FORMS": "0", + "revision_set-MAX_NUM_FORMS": "", + "revision_set-0-repository": repository.pk, + "revision_set-0-revision": "146239817507f148d448db38840db7c3cbf47c76", + "revision_set-0-DELETE": "", + } + formset = FormSet(data, instance=repository) + self.assertFalse(formset.is_valid()) + + def test_callable_defaults(self): + # Use of callable defaults (see bug #7975). + + person = Person.objects.create(name="Ringo") + FormSet = inlineformset_factory( + Person, Membership, can_delete=False, extra=1, fields="__all__" + ) + formset = FormSet(instance=person) + + # Django will render a hidden field for model fields that have a callable + # default. This is required to ensure the value is tested for change correctly + # when determine what extra forms have changed to save. + + self.assertEqual(len(formset.forms), 1) # this formset only has one form + form = formset.forms[0] + now = form.fields["date_joined"].initial() + result = form.as_p() + result = re.sub( + r"[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}(?:\.[0-9]+)?", + "__DATETIME__", + result, + ) + self.assertHTMLEqual( + result, + '

    ' + '' + '

    ' + '

    ' + '' + '' + '

    ' % person.id, + ) + + # test for validation with callable defaults. Validations rely on hidden fields + + data = { + "membership_set-TOTAL_FORMS": "1", + "membership_set-INITIAL_FORMS": "0", + "membership_set-MAX_NUM_FORMS": "", + "membership_set-0-date_joined": now.strftime("%Y-%m-%d %H:%M:%S"), + "initial-membership_set-0-date_joined": now.strftime("%Y-%m-%d %H:%M:%S"), + "membership_set-0-karma": "", + } + formset = FormSet(data, instance=person) + self.assertTrue(formset.is_valid()) + + # now test for when the data changes + + one_day_later = now + datetime.timedelta(days=1) + filled_data = { + "membership_set-TOTAL_FORMS": "1", + "membership_set-INITIAL_FORMS": "0", + "membership_set-MAX_NUM_FORMS": "", + "membership_set-0-date_joined": one_day_later.strftime("%Y-%m-%d %H:%M:%S"), + "initial-membership_set-0-date_joined": now.strftime("%Y-%m-%d %H:%M:%S"), + "membership_set-0-karma": "", + } + formset = FormSet(filled_data, instance=person) + self.assertFalse(formset.is_valid()) + + # now test with split datetime fields + + class MembershipForm(forms.ModelForm): + date_joined = forms.SplitDateTimeField(initial=now) + + class Meta: + model = Membership + fields = "__all__" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.fields["date_joined"].widget = forms.SplitDateTimeWidget() + + FormSet = inlineformset_factory( + Person, + Membership, + form=MembershipForm, + can_delete=False, + extra=1, + fields="__all__", + ) + data = { + "membership_set-TOTAL_FORMS": "1", + "membership_set-INITIAL_FORMS": "0", + "membership_set-MAX_NUM_FORMS": "", + "membership_set-0-date_joined_0": now.strftime("%Y-%m-%d"), + "membership_set-0-date_joined_1": now.strftime("%H:%M:%S"), + "initial-membership_set-0-date_joined": now.strftime("%Y-%m-%d %H:%M:%S"), + "membership_set-0-karma": "", + } + formset = FormSet(data, instance=person) + self.assertTrue(formset.is_valid()) + + def test_inlineformset_factory_with_null_fk(self): + # inlineformset_factory tests 
with fk having null=True. see #9462. + # create some data that will exhibit the issue + team = Team.objects.create(name="Red Vipers") + Player(name="Timmy").save() + Player(name="Bobby", team=team).save() + + PlayerInlineFormSet = inlineformset_factory(Team, Player, fields="__all__") + formset = PlayerInlineFormSet() + self.assertQuerySetEqual(formset.get_queryset(), []) + + formset = PlayerInlineFormSet(instance=team) + players = formset.get_queryset() + self.assertEqual(len(players), 1) + (player1,) = players + self.assertEqual(player1.team, team) + self.assertEqual(player1.name, "Bobby") + + def test_inlineformset_with_arrayfield(self): + class SimpleArrayField(forms.CharField): + """A proxy for django.contrib.postgres.forms.SimpleArrayField.""" + + def to_python(self, value): + value = super().to_python(value) + return value.split(",") if value else [] + + class BookForm(forms.ModelForm): + title = SimpleArrayField() + + class Meta: + model = Book + fields = ("title",) + + BookFormSet = inlineformset_factory(Author, Book, form=BookForm) + data = { + "book_set-TOTAL_FORMS": "3", + "book_set-INITIAL_FORMS": "0", + "book_set-MAX_NUM_FORMS": "", + "book_set-0-title": "test1,test2", + "book_set-1-title": "test1,test2", + "book_set-2-title": "test3,test4", + } + author = Author.objects.create(name="test") + formset = BookFormSet(data, instance=author) + self.assertEqual( + formset.errors, + [{}, {"__all__": ["Please correct the duplicate values below."]}, {}], + ) + + def test_model_formset_with_custom_pk(self): + # a formset for a Model that has a custom primary key that still needs to be + # added to the formset automatically + FormSet = modelformset_factory( + ClassyMexicanRestaurant, fields=["tacos_are_yummy"] + ) + self.assertEqual( + sorted(FormSet().forms[0].fields), ["tacos_are_yummy", "the_restaurant"] + ) + + def test_model_formset_with_initial_model_instance(self): + # has_changed should compare model instance and primary key + # see #18898 + FormSet = 
modelformset_factory(Poem, fields="__all__") + john_milton = Poet(name="John Milton") + john_milton.save() + data = { + "form-TOTAL_FORMS": 1, + "form-INITIAL_FORMS": 0, + "form-MAX_NUM_FORMS": "", + "form-0-name": "", + "form-0-poet": str(john_milton.id), + } + formset = FormSet(initial=[{"poet": john_milton}], data=data) + self.assertFalse(formset.extra_forms[0].has_changed()) + + def test_model_formset_with_initial_queryset(self): + # has_changed should work with queryset and list of pk's + # see #18898 + FormSet = modelformset_factory(AuthorMeeting, fields="__all__") + Author.objects.create(pk=1, name="Charles Baudelaire") + data = { + "form-TOTAL_FORMS": 1, + "form-INITIAL_FORMS": 0, + "form-MAX_NUM_FORMS": "", + "form-0-name": "", + "form-0-created": "", + "form-0-authors": list(Author.objects.values_list("id", flat=True)), + } + formset = FormSet(initial=[{"authors": Author.objects.all()}], data=data) + self.assertFalse(formset.extra_forms[0].has_changed()) + + def test_prevent_duplicates_from_with_the_same_formset(self): + FormSet = modelformset_factory(Product, fields="__all__", extra=2) + data = { + "form-TOTAL_FORMS": 2, + "form-INITIAL_FORMS": 0, + "form-MAX_NUM_FORMS": "", + "form-0-slug": "red_car", + "form-1-slug": "red_car", + } + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset._non_form_errors, ["Please correct the duplicate data for slug."] + ) + + FormSet = modelformset_factory(Price, fields="__all__", extra=2) + data = { + "form-TOTAL_FORMS": 2, + "form-INITIAL_FORMS": 0, + "form-MAX_NUM_FORMS": "", + "form-0-price": "25", + "form-0-quantity": "7", + "form-1-price": "25", + "form-1-quantity": "7", + } + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset._non_form_errors, + [ + "Please correct the duplicate data for price and quantity, which must " + "be unique." 
+ ], + ) + + # Only the price field is specified, this should skip any unique + # checks since the unique_together is not fulfilled. This will fail + # with a KeyError if broken. + FormSet = modelformset_factory(Price, fields=("price",), extra=2) + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-price": "24", + "form-1-price": "24", + } + formset = FormSet(data) + self.assertTrue(formset.is_valid()) + + FormSet = inlineformset_factory(Author, Book, extra=0, fields="__all__") + author = Author.objects.create(pk=1, name="Charles Baudelaire") + Book.objects.create(pk=1, author=author, title="Les Paradis Artificiels") + Book.objects.create(pk=2, author=author, title="Les Fleurs du Mal") + Book.objects.create(pk=3, author=author, title="Flowers of Evil") + + book_ids = author.book_set.order_by("id").values_list("id", flat=True) + data = { + "book_set-TOTAL_FORMS": "2", + "book_set-INITIAL_FORMS": "2", + "book_set-MAX_NUM_FORMS": "", + "book_set-0-title": "The 2008 Election", + "book_set-0-author": str(author.id), + "book_set-0-id": str(book_ids[0]), + "book_set-1-title": "The 2008 Election", + "book_set-1-author": str(author.id), + "book_set-1-id": str(book_ids[1]), + } + formset = FormSet(data=data, instance=author) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset._non_form_errors, ["Please correct the duplicate data for title."] + ) + self.assertEqual( + formset.errors, + [{}, {"__all__": ["Please correct the duplicate values below."]}], + ) + + FormSet = modelformset_factory(Post, fields="__all__", extra=2) + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-title": "blah", + "form-0-slug": "Morning", + "form-0-subtitle": "foo", + "form-0-posted": "2009-01-01", + "form-1-title": "blah", + "form-1-slug": "Morning in Prague", + "form-1-subtitle": "rawr", + "form-1-posted": "2009-01-01", + } + formset = FormSet(data) + 
self.assertFalse(formset.is_valid()) + self.assertEqual( + formset._non_form_errors, + [ + "Please correct the duplicate data for title which must be unique for " + "the date in posted." + ], + ) + self.assertEqual( + formset.errors, + [{}, {"__all__": ["Please correct the duplicate values below."]}], + ) + + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-title": "foo", + "form-0-slug": "Morning in Prague", + "form-0-subtitle": "foo", + "form-0-posted": "2009-01-01", + "form-1-title": "blah", + "form-1-slug": "Morning in Prague", + "form-1-subtitle": "rawr", + "form-1-posted": "2009-08-02", + } + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset._non_form_errors, + [ + "Please correct the duplicate data for slug which must be unique for " + "the year in posted." + ], + ) + + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "", + "form-0-title": "foo", + "form-0-slug": "Morning in Prague", + "form-0-subtitle": "rawr", + "form-0-posted": "2008-08-01", + "form-1-title": "blah", + "form-1-slug": "Prague", + "form-1-subtitle": "rawr", + "form-1-posted": "2009-08-02", + } + formset = FormSet(data) + self.assertFalse(formset.is_valid()) + self.assertEqual( + formset._non_form_errors, + [ + "Please correct the duplicate data for subtitle which must be unique " + "for the month in posted." + ], + ) + + def test_prevent_change_outer_model_and_create_invalid_data(self): + author = Author.objects.create(name="Charles") + other_author = Author.objects.create(name="Walt") + AuthorFormSet = modelformset_factory(Author, fields="__all__") + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "2", + "form-MAX_NUM_FORMS": "", + "form-0-id": str(author.id), + "form-0-name": "Charles", + "form-1-id": str(other_author.id), # A model not in the formset's queryset. 
+ "form-1-name": "Changed name", + } + # This formset is only for Walt Whitman and shouldn't accept data for + # other_author. + formset = AuthorFormSet( + data=data, queryset=Author.objects.filter(id__in=(author.id,)) + ) + self.assertTrue(formset.is_valid()) + formset.save() + # The name of other_author shouldn't be changed and new models aren't + # created. + self.assertSequenceEqual(Author.objects.all(), [author, other_author]) + + def test_validation_without_id(self): + AuthorFormSet = modelformset_factory(Author, fields="__all__") + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "", + "form-0-name": "Charles", + } + formset = AuthorFormSet(data) + self.assertEqual( + formset.errors, + [{"id": ["This field is required."]}], + ) + + def test_validation_with_child_model_without_id(self): + BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields="__all__") + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "", + "form-0-name": "Charles", + "form-0-write_speed": "10", + } + formset = BetterAuthorFormSet(data) + self.assertEqual( + formset.errors, + [{"author_ptr": ["This field is required."]}], + ) + + def test_validation_with_invalid_id(self): + AuthorFormSet = modelformset_factory(Author, fields="__all__") + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "", + "form-0-id": "abc", + "form-0-name": "Charles", + } + formset = AuthorFormSet(data) + self.assertEqual( + formset.errors, + [ + { + "id": [ + "Select a valid choice. That choice is not one of the " + "available choices." 
+ ] + } + ], + ) + + def test_validation_with_nonexistent_id(self): + AuthorFormSet = modelformset_factory(Author, fields="__all__") + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "", + "form-0-id": "12345", + "form-0-name": "Charles", + } + formset = AuthorFormSet(data) + self.assertEqual( + formset.errors, + [ + { + "id": [ + "Select a valid choice. That choice is not one of the " + "available choices." + ] + } + ], + ) + + def test_initial_form_count_empty_data(self): + AuthorFormSet = modelformset_factory(Author, fields="__all__") + formset = AuthorFormSet({}) + self.assertEqual(formset.initial_form_count(), 0) + + def test_edit_only(self): + charles = Author.objects.create(name="Charles Baudelaire") + AuthorFormSet = modelformset_factory(Author, fields="__all__", edit_only=True) + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "0", + "form-0-name": "Arthur Rimbaud", + "form-1-name": "Walt Whitman", + } + formset = AuthorFormSet(data) + self.assertIs(formset.is_valid(), True) + formset.save() + self.assertSequenceEqual(Author.objects.all(), [charles]) + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "0", + "form-0-id": charles.pk, + "form-0-name": "Arthur Rimbaud", + "form-1-name": "Walt Whitman", + } + formset = AuthorFormSet(data) + self.assertIs(formset.is_valid(), True) + formset.save() + charles.refresh_from_db() + self.assertEqual(charles.name, "Arthur Rimbaud") + self.assertSequenceEqual(Author.objects.all(), [charles]) + + def test_edit_only_inlineformset_factory(self): + charles = Author.objects.create(name="Charles Baudelaire") + book = Book.objects.create(author=charles, title="Les Paradis Artificiels") + AuthorFormSet = inlineformset_factory( + Author, + Book, + can_delete=False, + fields="__all__", + edit_only=True, + ) + data = { + "book_set-TOTAL_FORMS": "4", + "book_set-INITIAL_FORMS": "1", + "book_set-MAX_NUM_FORMS": 
"0", + "book_set-0-id": book.pk, + "book_set-0-title": "Les Fleurs du Mal", + "book_set-0-author": charles.pk, + "book_set-1-title": "Flowers of Evil", + "book_set-1-author": charles.pk, + } + formset = AuthorFormSet(data, instance=charles) + self.assertIs(formset.is_valid(), True) + formset.save() + book.refresh_from_db() + self.assertEqual(book.title, "Les Fleurs du Mal") + self.assertSequenceEqual(Book.objects.all(), [book]) + + def test_edit_only_object_outside_of_queryset(self): + charles = Author.objects.create(name="Charles Baudelaire") + walt = Author.objects.create(name="Walt Whitman") + data = { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "1", + "form-0-id": walt.pk, + "form-0-name": "Parth Patil", + } + AuthorFormSet = modelformset_factory(Author, fields="__all__", edit_only=True) + formset = AuthorFormSet(data, queryset=Author.objects.filter(pk=charles.pk)) + self.assertIs(formset.is_valid(), True) + formset.save() + self.assertCountEqual(Author.objects.all(), [charles, walt]) + + def test_edit_only_formset_factory_with_basemodelformset(self): + charles = Author.objects.create(name="Charles Baudelaire") + + class AuthorForm(forms.ModelForm): + class Meta: + model = Author + fields = "__all__" + + class BaseAuthorFormSet(BaseModelFormSet): + def __init__(self, *args, **kwargs): + self.model = Author + super().__init__(*args, **kwargs) + + AuthorFormSet = formset_factory(AuthorForm, formset=BaseAuthorFormSet) + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "1", + "form-MAX_NUM_FORMS": "0", + "form-0-id": charles.pk, + "form-0-name": "Shawn Dong", + "form-1-name": "Walt Whitman", + } + formset = AuthorFormSet(data) + self.assertIs(formset.is_valid(), True) + formset.save() + self.assertEqual(Author.objects.count(), 2) + charles.refresh_from_db() + self.assertEqual(charles.name, "Shawn Dong") + self.assertEqual(Author.objects.count(), 2) + + +class TestModelFormsetOverridesTroughFormMeta(TestCase): + def 
test_modelformset_factory_widgets(self): + widgets = {"name": forms.TextInput(attrs={"class": "poet"})} + PoetFormSet = modelformset_factory(Poet, fields="__all__", widgets=widgets) + form = PoetFormSet.form() + self.assertHTMLEqual( + str(form["name"]), + '", + ) + + def test_inlineformset_factory_widgets(self): + widgets = {"title": forms.TextInput(attrs={"class": "book"})} + BookFormSet = inlineformset_factory( + Author, Book, widgets=widgets, fields="__all__" + ) + form = BookFormSet.form() + self.assertHTMLEqual( + str(form["title"]), + '', + ) + + def test_modelformset_factory_labels_overrides(self): + BookFormSet = modelformset_factory( + Book, fields="__all__", labels={"title": "Name"} + ) + form = BookFormSet.form() + self.assertHTMLEqual( + form["title"].label_tag(), '' + ) + self.assertHTMLEqual( + form["title"].legend_tag(), + 'Name:', + ) + + def test_inlineformset_factory_labels_overrides(self): + BookFormSet = inlineformset_factory( + Author, Book, fields="__all__", labels={"title": "Name"} + ) + form = BookFormSet.form() + self.assertHTMLEqual( + form["title"].label_tag(), '' + ) + self.assertHTMLEqual( + form["title"].legend_tag(), + 'Name:', + ) + + def test_modelformset_factory_help_text_overrides(self): + BookFormSet = modelformset_factory( + Book, fields="__all__", help_texts={"title": "Choose carefully."} + ) + form = BookFormSet.form() + self.assertEqual(form["title"].help_text, "Choose carefully.") + + def test_inlineformset_factory_help_text_overrides(self): + BookFormSet = inlineformset_factory( + Author, Book, fields="__all__", help_texts={"title": "Choose carefully."} + ) + form = BookFormSet.form() + self.assertEqual(form["title"].help_text, "Choose carefully.") + + def test_modelformset_factory_error_messages_overrides(self): + author = Author.objects.create(pk=1, name="Charles Baudelaire") + BookFormSet = modelformset_factory( + Book, + fields="__all__", + error_messages={"title": {"max_length": "Title too long!!"}}, + ) + form = 
BookFormSet.form(data={"title": "Foo " * 30, "author": author.id}) + form.full_clean() + self.assertEqual(form.errors, {"title": ["Title too long!!"]}) + + def test_inlineformset_factory_error_messages_overrides(self): + author = Author.objects.create(pk=1, name="Charles Baudelaire") + BookFormSet = inlineformset_factory( + Author, + Book, + fields="__all__", + error_messages={"title": {"max_length": "Title too long!!"}}, + ) + form = BookFormSet.form(data={"title": "Foo " * 30, "author": author.id}) + form.full_clean() + self.assertEqual(form.errors, {"title": ["Title too long!!"]}) + + def test_modelformset_factory_field_class_overrides(self): + author = Author.objects.create(pk=1, name="Charles Baudelaire") + BookFormSet = modelformset_factory( + Book, + fields="__all__", + field_classes={ + "title": forms.SlugField, + }, + ) + form = BookFormSet.form(data={"title": "Foo " * 30, "author": author.id}) + self.assertIs(Book._meta.get_field("title").__class__, models.CharField) + self.assertIsInstance(form.fields["title"], forms.SlugField) + + def test_inlineformset_factory_field_class_overrides(self): + author = Author.objects.create(pk=1, name="Charles Baudelaire") + BookFormSet = inlineformset_factory( + Author, + Book, + fields="__all__", + field_classes={ + "title": forms.SlugField, + }, + ) + form = BookFormSet.form(data={"title": "Foo " * 30, "author": author.id}) + self.assertIs(Book._meta.get_field("title").__class__, models.CharField) + self.assertIsInstance(form.fields["title"], forms.SlugField) + + def test_modelformset_factory_absolute_max(self): + AuthorFormSet = modelformset_factory( + Author, fields="__all__", absolute_max=1500 + ) + data = { + "form-TOTAL_FORMS": "1501", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "0", + } + formset = AuthorFormSet(data=data) + self.assertIs(formset.is_valid(), False) + self.assertEqual(len(formset.forms), 1500) + self.assertEqual( + formset.non_form_errors(), + ["Please submit at most 1000 forms."], + ) + 
+ def test_modelformset_factory_absolute_max_with_max_num(self): + AuthorFormSet = modelformset_factory( + Author, + fields="__all__", + max_num=20, + absolute_max=100, + ) + data = { + "form-TOTAL_FORMS": "101", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "0", + } + formset = AuthorFormSet(data=data) + self.assertIs(formset.is_valid(), False) + self.assertEqual(len(formset.forms), 100) + self.assertEqual( + formset.non_form_errors(), + ["Please submit at most 20 forms."], + ) + + def test_inlineformset_factory_absolute_max(self): + author = Author.objects.create(name="Charles Baudelaire") + BookFormSet = inlineformset_factory( + Author, + Book, + fields="__all__", + absolute_max=1500, + ) + data = { + "book_set-TOTAL_FORMS": "1501", + "book_set-INITIAL_FORMS": "0", + "book_set-MAX_NUM_FORMS": "0", + } + formset = BookFormSet(data, instance=author) + self.assertIs(formset.is_valid(), False) + self.assertEqual(len(formset.forms), 1500) + self.assertEqual( + formset.non_form_errors(), + ["Please submit at most 1000 forms."], + ) + + def test_inlineformset_factory_absolute_max_with_max_num(self): + author = Author.objects.create(name="Charles Baudelaire") + BookFormSet = inlineformset_factory( + Author, + Book, + fields="__all__", + max_num=20, + absolute_max=100, + ) + data = { + "book_set-TOTAL_FORMS": "101", + "book_set-INITIAL_FORMS": "0", + "book_set-MAX_NUM_FORMS": "0", + } + formset = BookFormSet(data, instance=author) + self.assertIs(formset.is_valid(), False) + self.assertEqual(len(formset.forms), 100) + self.assertEqual( + formset.non_form_errors(), + ["Please submit at most 20 forms."], + ) + + def test_modelformset_factory_can_delete_extra(self): + AuthorFormSet = modelformset_factory( + Author, + fields="__all__", + can_delete=True, + can_delete_extra=True, + extra=2, + ) + formset = AuthorFormSet() + self.assertEqual(len(formset), 2) + self.assertIn("DELETE", formset.forms[0].fields) + self.assertIn("DELETE", formset.forms[1].fields) + + def 
test_modelformset_factory_disable_delete_extra(self): + AuthorFormSet = modelformset_factory( + Author, + fields="__all__", + can_delete=True, + can_delete_extra=False, + extra=2, + ) + formset = AuthorFormSet() + self.assertEqual(len(formset), 2) + self.assertNotIn("DELETE", formset.forms[0].fields) + self.assertNotIn("DELETE", formset.forms[1].fields) + + def test_inlineformset_factory_can_delete_extra(self): + BookFormSet = inlineformset_factory( + Author, + Book, + fields="__all__", + can_delete=True, + can_delete_extra=True, + extra=2, + ) + formset = BookFormSet() + self.assertEqual(len(formset), 2) + self.assertIn("DELETE", formset.forms[0].fields) + self.assertIn("DELETE", formset.forms[1].fields) + + def test_inlineformset_factory_can_not_delete_extra(self): + BookFormSet = inlineformset_factory( + Author, + Book, + fields="__all__", + can_delete=True, + can_delete_extra=False, + extra=2, + ) + formset = BookFormSet() + self.assertEqual(len(formset), 2) + self.assertNotIn("DELETE", formset.forms[0].fields) + self.assertNotIn("DELETE", formset.forms[1].fields) + + def test_inlineformset_factory_passes_renderer(self): + from django.forms.renderers import Jinja2 + + renderer = Jinja2() + BookFormSet = inlineformset_factory( + Author, + Book, + fields="__all__", + renderer=renderer, + ) + formset = BookFormSet() + self.assertEqual(formset.renderer, renderer) + + def test_modelformset_factory_passes_renderer(self): + from django.forms.renderers import Jinja2 + + renderer = Jinja2() + BookFormSet = modelformset_factory(Author, fields="__all__", renderer=renderer) + formset = BookFormSet() + self.assertEqual(formset.renderer, renderer) diff --git a/testbed/django__django/tests/model_formsets_regress/__init__.py b/testbed/django__django/tests/model_formsets_regress/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_formsets_regress/tests.py 
b/testbed/django__django/tests/model_formsets_regress/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..0ccc2c04901f37071e0aab48e176824f87a33bea --- /dev/null +++ b/testbed/django__django/tests/model_formsets_regress/tests.py @@ -0,0 +1,586 @@ +from django import forms +from django.forms.formsets import DELETION_FIELD_NAME, BaseFormSet +from django.forms.models import ( + BaseModelFormSet, + inlineformset_factory, + modelform_factory, + modelformset_factory, +) +from django.forms.utils import ErrorDict, ErrorList +from django.test import TestCase + +from .models import ( + Host, + Manager, + Network, + ProfileNetwork, + Restaurant, + User, + UserPreferences, + UserProfile, + UserSite, +) + + +class InlineFormsetTests(TestCase): + def test_formset_over_to_field(self): + """ + A formset over a ForeignKey with a to_field can be saved. + """ + Form = modelform_factory(User, fields="__all__") + FormSet = inlineformset_factory(User, UserSite, fields="__all__") + + # Instantiate the Form and FormSet to prove + # you can create a form with no data + form = Form() + form_set = FormSet(instance=User()) + + # Now create a new User and UserSite instance + data = { + "serial": "1", + "username": "apollo13", + "usersite_set-TOTAL_FORMS": "1", + "usersite_set-INITIAL_FORMS": "0", + "usersite_set-MAX_NUM_FORMS": "0", + "usersite_set-0-data": "10", + "usersite_set-0-user": "apollo13", + } + user = User() + form = Form(data) + if form.is_valid(): + user = form.save() + else: + self.fail("Errors found on form:%s" % form_set) + + form_set = FormSet(data, instance=user) + if form_set.is_valid(): + form_set.save() + usersite = UserSite.objects.values() + self.assertEqual(usersite[0]["data"], 10) + self.assertEqual(usersite[0]["user_id"], "apollo13") + else: + self.fail("Errors found on formset:%s" % form_set.errors) + + # Now update the UserSite instance + data = { + "usersite_set-TOTAL_FORMS": "1", + "usersite_set-INITIAL_FORMS": "1", + 
"usersite_set-MAX_NUM_FORMS": "0", + "usersite_set-0-id": str(usersite[0]["id"]), + "usersite_set-0-data": "11", + "usersite_set-0-user": "apollo13", + } + form_set = FormSet(data, instance=user) + if form_set.is_valid(): + form_set.save() + usersite = UserSite.objects.values() + self.assertEqual(usersite[0]["data"], 11) + self.assertEqual(usersite[0]["user_id"], "apollo13") + else: + self.fail("Errors found on formset:%s" % form_set.errors) + + # Now add a new UserSite instance + data = { + "usersite_set-TOTAL_FORMS": "2", + "usersite_set-INITIAL_FORMS": "1", + "usersite_set-MAX_NUM_FORMS": "0", + "usersite_set-0-id": str(usersite[0]["id"]), + "usersite_set-0-data": "11", + "usersite_set-0-user": "apollo13", + "usersite_set-1-data": "42", + "usersite_set-1-user": "apollo13", + } + form_set = FormSet(data, instance=user) + if form_set.is_valid(): + form_set.save() + usersite = UserSite.objects.values().order_by("data") + self.assertEqual(usersite[0]["data"], 11) + self.assertEqual(usersite[0]["user_id"], "apollo13") + self.assertEqual(usersite[1]["data"], 42) + self.assertEqual(usersite[1]["user_id"], "apollo13") + else: + self.fail("Errors found on formset:%s" % form_set.errors) + + def test_formset_over_inherited_model(self): + """ + A formset over a ForeignKey with a to_field can be saved. 
+ """ + Form = modelform_factory(Restaurant, fields="__all__") + FormSet = inlineformset_factory(Restaurant, Manager, fields="__all__") + + # Instantiate the Form and FormSet to prove + # you can create a form with no data + form = Form() + form_set = FormSet(instance=Restaurant()) + + # Now create a new Restaurant and Manager instance + data = { + "name": "Guido's House of Pasta", + "manager_set-TOTAL_FORMS": "1", + "manager_set-INITIAL_FORMS": "0", + "manager_set-MAX_NUM_FORMS": "0", + "manager_set-0-name": "Guido Van Rossum", + } + restaurant = User() + form = Form(data) + if form.is_valid(): + restaurant = form.save() + else: + self.fail("Errors found on form:%s" % form_set) + + form_set = FormSet(data, instance=restaurant) + if form_set.is_valid(): + form_set.save() + manager = Manager.objects.values() + self.assertEqual(manager[0]["name"], "Guido Van Rossum") + else: + self.fail("Errors found on formset:%s" % form_set.errors) + + # Now update the Manager instance + data = { + "manager_set-TOTAL_FORMS": "1", + "manager_set-INITIAL_FORMS": "1", + "manager_set-MAX_NUM_FORMS": "0", + "manager_set-0-id": str(manager[0]["id"]), + "manager_set-0-name": "Terry Gilliam", + } + form_set = FormSet(data, instance=restaurant) + if form_set.is_valid(): + form_set.save() + manager = Manager.objects.values() + self.assertEqual(manager[0]["name"], "Terry Gilliam") + else: + self.fail("Errors found on formset:%s" % form_set.errors) + + # Now add a new Manager instance + data = { + "manager_set-TOTAL_FORMS": "2", + "manager_set-INITIAL_FORMS": "1", + "manager_set-MAX_NUM_FORMS": "0", + "manager_set-0-id": str(manager[0]["id"]), + "manager_set-0-name": "Terry Gilliam", + "manager_set-1-name": "John Cleese", + } + form_set = FormSet(data, instance=restaurant) + if form_set.is_valid(): + form_set.save() + manager = Manager.objects.values().order_by("name") + self.assertEqual(manager[0]["name"], "John Cleese") + self.assertEqual(manager[1]["name"], "Terry Gilliam") + else: + 
self.fail("Errors found on formset:%s" % form_set.errors) + + def test_inline_model_with_to_field(self): + """ + #13794 --- An inline model with a to_field of a formset with instance + has working relations. + """ + FormSet = inlineformset_factory(User, UserSite, exclude=("is_superuser",)) + + user = User.objects.create(username="guido", serial=1337) + UserSite.objects.create(user=user, data=10) + formset = FormSet(instance=user) + + # Testing the inline model's relation + self.assertEqual(formset[0].instance.user_id, "guido") + + def test_inline_model_with_primary_to_field(self): + """An inline model with a OneToOneField with to_field & primary key.""" + FormSet = inlineformset_factory( + User, UserPreferences, exclude=("is_superuser",) + ) + user = User.objects.create(username="guido", serial=1337) + UserPreferences.objects.create(user=user, favorite_number=10) + formset = FormSet(instance=user) + self.assertEqual(formset[0].fields["user"].initial, "guido") + + def test_inline_model_with_to_field_to_rel(self): + """ + #13794 --- An inline model with a to_field to a related field of a + formset with instance has working relations. + """ + FormSet = inlineformset_factory(UserProfile, ProfileNetwork, exclude=[]) + + user = User.objects.create(username="guido", serial=1337, pk=1) + self.assertEqual(user.pk, 1) + profile = UserProfile.objects.create(user=user, about="about", pk=2) + self.assertEqual(profile.pk, 2) + ProfileNetwork.objects.create(profile=profile, network=10, identifier=10) + formset = FormSet(instance=profile) + + # Testing the inline model's relation + self.assertEqual(formset[0].instance.profile_id, 1) + + def test_formset_with_none_instance(self): + "A formset with instance=None can be created. 
Regression for #11872" + Form = modelform_factory(User, fields="__all__") + FormSet = inlineformset_factory(User, UserSite, fields="__all__") + + # Instantiate the Form and FormSet to prove + # you can create a formset with an instance of None + Form(instance=None) + FormSet(instance=None) + + def test_empty_fields_on_modelformset(self): + """ + No fields passed to modelformset_factory() should result in no fields + on returned forms except for the id (#14119). + """ + UserFormSet = modelformset_factory(User, fields=()) + formset = UserFormSet() + for form in formset.forms: + self.assertIn("id", form.fields) + self.assertEqual(len(form.fields), 1) + + def test_save_as_new_with_new_inlines(self): + """ + Existing and new inlines are saved with save_as_new. + + Regression for #14938. + """ + efnet = Network.objects.create(name="EFNet") + host1 = Host.objects.create(hostname="irc.he.net", network=efnet) + + HostFormSet = inlineformset_factory(Network, Host, fields="__all__") + + # Add a new host, modify previous host, and save-as-new + data = { + "host_set-TOTAL_FORMS": "2", + "host_set-INITIAL_FORMS": "1", + "host_set-MAX_NUM_FORMS": "0", + "host_set-0-id": str(host1.id), + "host_set-0-hostname": "tranquility.hub.dal.net", + "host_set-1-hostname": "matrix.de.eu.dal.net", + } + + # To save a formset as new, it needs a new hub instance + dalnet = Network.objects.create(name="DALnet") + formset = HostFormSet(data, instance=dalnet, save_as_new=True) + + self.assertTrue(formset.is_valid()) + formset.save() + self.assertQuerySetEqual( + dalnet.host_set.order_by("hostname"), + Host.objects.filter( + hostname__in=[ + "matrix.de.eu.dal.net", + "tranquility.hub.dal.net", + ] + ).order_by("hostname"), + ) + + def test_initial_data(self): + user = User.objects.create(username="bibi", serial=1) + UserSite.objects.create(user=user, data=7) + FormSet = inlineformset_factory(User, UserSite, extra=2, fields="__all__") + + formset = FormSet(instance=user, initial=[{"data": 41}, 
{"data": 42}]) + self.assertEqual(formset.forms[0].initial["data"], 7) + self.assertEqual(formset.extra_forms[0].initial["data"], 41) + self.assertIn('value="42"', formset.extra_forms[1].as_p()) + + +class FormsetTests(TestCase): + def test_error_class(self): + """ + Test the type of Formset and Form error attributes + """ + Formset = modelformset_factory(User, fields="__all__") + data = { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "0", + "form-0-id": "", + "form-0-username": "apollo13", + "form-0-serial": "1", + "form-1-id": "", + "form-1-username": "apollo13", + "form-1-serial": "2", + } + formset = Formset(data) + # check if the returned error classes are correct + # note: formset.errors returns a list as documented + self.assertIsInstance(formset.errors, list) + self.assertIsInstance(formset.non_form_errors(), ErrorList) + for form in formset.forms: + self.assertIsInstance(form.errors, ErrorDict) + self.assertIsInstance(form.non_field_errors(), ErrorList) + + def test_initial_data(self): + User.objects.create(username="bibi", serial=1) + Formset = modelformset_factory(User, fields="__all__", extra=2) + formset = Formset(initial=[{"username": "apollo11"}, {"username": "apollo12"}]) + self.assertEqual(formset.forms[0].initial["username"], "bibi") + self.assertEqual(formset.extra_forms[0].initial["username"], "apollo11") + self.assertIn('value="apollo12"', formset.extra_forms[1].as_p()) + + def test_extraneous_query_is_not_run(self): + Formset = modelformset_factory(Network, fields="__all__") + data = { + "test-TOTAL_FORMS": "1", + "test-INITIAL_FORMS": "0", + "test-MAX_NUM_FORMS": "", + "test-0-name": "Random Place", + } + with self.assertNumQueries(1): + formset = Formset(data, prefix="test") + formset.save() + + +class CustomWidget(forms.widgets.TextInput): + pass + + +class UserSiteForm(forms.ModelForm): + class Meta: + model = UserSite + fields = "__all__" + widgets = { + "id": CustomWidget, + "data": CustomWidget, + } + 
localized_fields = ("data",) + + +class Callback: + def __init__(self): + self.log = [] + + def __call__(self, db_field, **kwargs): + self.log.append((db_field, kwargs)) + return db_field.formfield(**kwargs) + + +class FormfieldCallbackTests(TestCase): + """ + Regression for #13095 and #17683: Using base forms with widgets + defined in Meta should not raise errors and BaseModelForm should respect + the specified pk widget. + """ + + def test_inlineformset_factory_default(self): + Formset = inlineformset_factory( + User, UserSite, form=UserSiteForm, fields="__all__" + ) + form = Formset().forms[0] + self.assertIsInstance(form["id"].field.widget, CustomWidget) + self.assertIsInstance(form["data"].field.widget, CustomWidget) + self.assertFalse(form.fields["id"].localize) + self.assertTrue(form.fields["data"].localize) + + def test_modelformset_factory_default(self): + Formset = modelformset_factory(UserSite, form=UserSiteForm) + form = Formset().forms[0] + self.assertIsInstance(form["id"].field.widget, CustomWidget) + self.assertIsInstance(form["data"].field.widget, CustomWidget) + self.assertFalse(form.fields["id"].localize) + self.assertTrue(form.fields["data"].localize) + + def assertCallbackCalled(self, callback): + id_field, user_field, data_field = UserSite._meta.fields + expected_log = [ + (id_field, {"widget": CustomWidget}), + (user_field, {}), + (data_field, {"widget": CustomWidget, "localize": True}), + ] + self.assertEqual(callback.log, expected_log) + + def test_inlineformset_custom_callback(self): + callback = Callback() + inlineformset_factory( + User, + UserSite, + form=UserSiteForm, + formfield_callback=callback, + fields="__all__", + ) + self.assertCallbackCalled(callback) + + def test_modelformset_custom_callback(self): + callback = Callback() + modelformset_factory(UserSite, form=UserSiteForm, formfield_callback=callback) + self.assertCallbackCalled(callback) + + +class BaseCustomDeleteFormSet(BaseFormSet): + """ + A formset mix-in that lets a form 
decide if it's to be deleted. + Works for BaseFormSets. Also works for ModelFormSets with #14099 fixed. + + form.should_delete() is called. The formset delete field is also suppressed. + """ + + def add_fields(self, form, index): + super().add_fields(form, index) + self.can_delete = True + if DELETION_FIELD_NAME in form.fields: + del form.fields[DELETION_FIELD_NAME] + + def _should_delete_form(self, form): + return hasattr(form, "should_delete") and form.should_delete() + + +class FormfieldShouldDeleteFormTests(TestCase): + """ + BaseModelFormSet should use ModelFormSet method _should_delete_form. + """ + + class BaseCustomDeleteModelFormSet(BaseModelFormSet, BaseCustomDeleteFormSet): + """Model FormSet with CustomDelete MixIn""" + + class CustomDeleteUserForm(forms.ModelForm): + """A model form with a 'should_delete' method""" + + class Meta: + model = User + fields = "__all__" + + def should_delete(self): + """Delete form if odd serial.""" + return self.instance.serial % 2 != 0 + + NormalFormset = modelformset_factory( + User, form=CustomDeleteUserForm, can_delete=True + ) + DeleteFormset = modelformset_factory( + User, form=CustomDeleteUserForm, formset=BaseCustomDeleteModelFormSet + ) + + data = { + "form-TOTAL_FORMS": "4", + "form-INITIAL_FORMS": "0", + "form-MAX_NUM_FORMS": "4", + "form-0-username": "John", + "form-0-serial": "1", + "form-1-username": "Paul", + "form-1-serial": "2", + "form-2-username": "George", + "form-2-serial": "3", + "form-3-username": "Ringo", + "form-3-serial": "5", + } + + delete_all_ids = { + "form-0-DELETE": "1", + "form-1-DELETE": "1", + "form-2-DELETE": "1", + "form-3-DELETE": "1", + } + + def test_init_database(self): + """Add test data to database via formset""" + formset = self.NormalFormset(self.data) + self.assertTrue(formset.is_valid()) + self.assertEqual(len(formset.save()), 4) + + def test_no_delete(self): + """Verify base formset doesn't modify database""" + # reload database + self.test_init_database() + + # pass 
standard data dict & see none updated + data = dict(self.data) + data["form-INITIAL_FORMS"] = 4 + data.update( + { + "form-%d-id" % i: user.pk + for i, user in enumerate(User.objects.order_by("pk")) + } + ) + formset = self.NormalFormset(data, queryset=User.objects.all()) + self.assertTrue(formset.is_valid()) + self.assertEqual(len(formset.save()), 0) + self.assertEqual(len(User.objects.all()), 4) + + def test_all_delete(self): + """Verify base formset honors DELETE field""" + # reload database + self.test_init_database() + + # create data dict with all fields marked for deletion + data = dict(self.data) + data["form-INITIAL_FORMS"] = 4 + data.update( + {"form-%d-id" % i: user.pk for i, user in enumerate(User.objects.all())} + ) + data.update(self.delete_all_ids) + formset = self.NormalFormset(data, queryset=User.objects.all()) + self.assertTrue(formset.is_valid()) + self.assertEqual(len(formset.save()), 0) + self.assertEqual(len(User.objects.all()), 0) + + def test_custom_delete(self): + """Verify DeleteFormset ignores DELETE field and uses form method""" + # reload database + self.test_init_database() + + # Create formset with custom Delete function + # create data dict with all fields marked for deletion + data = dict(self.data) + data["form-INITIAL_FORMS"] = 4 + data.update( + { + "form-%d-id" % i: user.pk + for i, user in enumerate(User.objects.order_by("pk")) + } + ) + data.update(self.delete_all_ids) + formset = self.DeleteFormset(data, queryset=User.objects.all()) + + # Three with odd serial values were deleted. + self.assertTrue(formset.is_valid()) + self.assertEqual(len(formset.save()), 0) + self.assertEqual(User.objects.count(), 1) + + # No odd serial values left. 
+ odd_serials = [user.serial for user in User.objects.all() if user.serial % 2] + self.assertEqual(len(odd_serials), 0) + + +class RedeleteTests(TestCase): + def test_resubmit(self): + u = User.objects.create(username="foo", serial=1) + us = UserSite.objects.create(user=u, data=7) + formset_cls = inlineformset_factory(User, UserSite, fields="__all__") + data = { + "serial": "1", + "username": "foo", + "usersite_set-TOTAL_FORMS": "1", + "usersite_set-INITIAL_FORMS": "1", + "usersite_set-MAX_NUM_FORMS": "1", + "usersite_set-0-id": str(us.pk), + "usersite_set-0-data": "7", + "usersite_set-0-user": "foo", + "usersite_set-0-DELETE": "1", + } + formset = formset_cls(data, instance=u) + self.assertTrue(formset.is_valid()) + formset.save() + self.assertEqual(UserSite.objects.count(), 0) + formset = formset_cls(data, instance=u) + # Even if the "us" object isn't in the DB any more, the form + # validates. + self.assertTrue(formset.is_valid()) + formset.save() + self.assertEqual(UserSite.objects.count(), 0) + + def test_delete_already_deleted(self): + u = User.objects.create(username="foo", serial=1) + us = UserSite.objects.create(user=u, data=7) + formset_cls = inlineformset_factory(User, UserSite, fields="__all__") + data = { + "serial": "1", + "username": "foo", + "usersite_set-TOTAL_FORMS": "1", + "usersite_set-INITIAL_FORMS": "1", + "usersite_set-MAX_NUM_FORMS": "1", + "usersite_set-0-id": str(us.pk), + "usersite_set-0-data": "7", + "usersite_set-0-user": "foo", + "usersite_set-0-DELETE": "1", + } + formset = formset_cls(data, instance=u) + us.delete() + self.assertTrue(formset.is_valid()) + formset.save() + self.assertEqual(UserSite.objects.count(), 0) diff --git a/testbed/django__django/tests/model_indexes/__init__.py b/testbed/django__django/tests/model_indexes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_indexes/models.py 
b/testbed/django__django/tests/model_indexes/models.py new file mode 100644 index 0000000000000000000000000000000000000000..3bda6069047f1a6a5590a76013d9d2685b3ea595 --- /dev/null +++ b/testbed/django__django/tests/model_indexes/models.py @@ -0,0 +1,39 @@ +from django.db import models + + +class Book(models.Model): + title = models.CharField(max_length=50) + author = models.CharField(max_length=50) + pages = models.IntegerField(db_column="page_count") + shortcut = models.CharField(max_length=50, db_tablespace="idx_tbls") + isbn = models.CharField(max_length=50, db_tablespace="idx_tbls") + barcode = models.CharField(max_length=31) + + class Meta: + indexes = [ + models.Index(fields=["title"]), + models.Index(fields=["isbn", "id"]), + models.Index( + fields=["barcode"], name="%(app_label)s_%(class)s_barcode_idx" + ), + ] + + +class AbstractModel(models.Model): + name = models.CharField(max_length=50) + shortcut = models.CharField(max_length=3) + + class Meta: + abstract = True + indexes = [ + models.Index(fields=["name"]), + models.Index(fields=["shortcut"], name="%(app_label)s_%(class)s_idx"), + ] + + +class ChildModel1(AbstractModel): + pass + + +class ChildModel2(AbstractModel): + pass diff --git a/testbed/django__django/tests/model_indexes/tests.py b/testbed/django__django/tests/model_indexes/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..0c8378f6241c8f544a7ef7cb1fef2204e7d937fc --- /dev/null +++ b/testbed/django__django/tests/model_indexes/tests.py @@ -0,0 +1,372 @@ +from unittest import mock + +from django.conf import settings +from django.db import connection, models +from django.db.models.functions import Lower, Upper +from django.test import SimpleTestCase, TestCase, override_settings, skipUnlessDBFeature +from django.test.utils import isolate_apps + +from .models import Book, ChildModel1, ChildModel2 + + +class SimpleIndexesTests(SimpleTestCase): + def test_suffix(self): + self.assertEqual(models.Index.suffix, "idx") + + def 
test_repr(self): + index = models.Index(fields=["title"]) + named_index = models.Index(fields=["title"], name="title_idx") + multi_col_index = models.Index(fields=["title", "author"]) + partial_index = models.Index( + fields=["title"], name="long_books_idx", condition=models.Q(pages__gt=400) + ) + covering_index = models.Index( + fields=["title"], + name="include_idx", + include=["author", "pages"], + ) + opclasses_index = models.Index( + fields=["headline", "body"], + name="opclasses_idx", + opclasses=["varchar_pattern_ops", "text_pattern_ops"], + ) + func_index = models.Index(Lower("title"), "subtitle", name="book_func_idx") + tablespace_index = models.Index( + fields=["title"], + db_tablespace="idx_tbls", + name="book_tablespace_idx", + ) + self.assertEqual(repr(index), "") + self.assertEqual( + repr(named_index), + "", + ) + self.assertEqual(repr(multi_col_index), "") + self.assertEqual( + repr(partial_index), + "", + ) + self.assertEqual( + repr(covering_index), + "", + ) + self.assertEqual( + repr(opclasses_index), + "", + ) + self.assertEqual( + repr(func_index), + "", + ) + self.assertEqual( + repr(tablespace_index), + "", + ) + + def test_eq(self): + index = models.Index(fields=["title"]) + same_index = models.Index(fields=["title"]) + another_index = models.Index(fields=["title", "author"]) + index.model = Book + same_index.model = Book + another_index.model = Book + self.assertEqual(index, same_index) + self.assertEqual(index, mock.ANY) + self.assertNotEqual(index, another_index) + + def test_eq_func(self): + index = models.Index(Lower("title"), models.F("author"), name="book_func_idx") + same_index = models.Index(Lower("title"), "author", name="book_func_idx") + another_index = models.Index(Lower("title"), name="book_func_idx") + self.assertEqual(index, same_index) + self.assertEqual(index, mock.ANY) + self.assertNotEqual(index, another_index) + + def test_index_fields_type(self): + with self.assertRaisesMessage( + ValueError, "Index.fields must be a 
list or tuple." + ): + models.Index(fields="title") + + def test_index_fields_strings(self): + msg = "Index.fields must contain only strings with field names." + with self.assertRaisesMessage(ValueError, msg): + models.Index(fields=[models.F("title")]) + + def test_fields_tuple(self): + self.assertEqual(models.Index(fields=("title",)).fields, ["title"]) + + def test_requires_field_or_expression(self): + msg = "At least one field or expression is required to define an index." + with self.assertRaisesMessage(ValueError, msg): + models.Index() + + def test_expressions_and_fields_mutually_exclusive(self): + msg = "Index.fields and expressions are mutually exclusive." + with self.assertRaisesMessage(ValueError, msg): + models.Index(Upper("foo"), fields=["field"]) + + def test_opclasses_requires_index_name(self): + with self.assertRaisesMessage( + ValueError, "An index must be named to use opclasses." + ): + models.Index(opclasses=["jsonb_path_ops"]) + + def test_opclasses_requires_list_or_tuple(self): + with self.assertRaisesMessage( + ValueError, "Index.opclasses must be a list or tuple." + ): + models.Index( + name="test_opclass", fields=["field"], opclasses="jsonb_path_ops" + ) + + def test_opclasses_and_fields_same_length(self): + msg = "Index.fields and Index.opclasses must have the same number of elements." + with self.assertRaisesMessage(ValueError, msg): + models.Index( + name="test_opclass", + fields=["field", "other"], + opclasses=["jsonb_path_ops"], + ) + + def test_condition_requires_index_name(self): + with self.assertRaisesMessage( + ValueError, "An index must be named to use condition." + ): + models.Index(condition=models.Q(pages__gt=400)) + + def test_expressions_requires_index_name(self): + msg = "An index must be named to use expressions." + with self.assertRaisesMessage(ValueError, msg): + models.Index(Lower("field")) + + def test_expressions_with_opclasses(self): + msg = ( + "Index.opclasses cannot be used with expressions. 
Use " + "django.contrib.postgres.indexes.OpClass() instead." + ) + with self.assertRaisesMessage(ValueError, msg): + models.Index( + Lower("field"), + name="test_func_opclass", + opclasses=["jsonb_path_ops"], + ) + + def test_condition_must_be_q(self): + with self.assertRaisesMessage( + ValueError, "Index.condition must be a Q instance." + ): + models.Index(condition="invalid", name="long_book_idx") + + def test_include_requires_list_or_tuple(self): + msg = "Index.include must be a list or tuple." + with self.assertRaisesMessage(ValueError, msg): + models.Index(name="test_include", fields=["field"], include="other") + + def test_include_requires_index_name(self): + msg = "A covering index must be named." + with self.assertRaisesMessage(ValueError, msg): + models.Index(fields=["field"], include=["other"]) + + def test_name_auto_generation(self): + index = models.Index(fields=["author"]) + index.set_name_with_model(Book) + self.assertEqual(index.name, "model_index_author_0f5565_idx") + + # '-' for DESC columns should be accounted for in the index name. + index = models.Index(fields=["-author"]) + index.set_name_with_model(Book) + self.assertEqual(index.name, "model_index_author_708765_idx") + + # fields may be truncated in the name. db_column is used for naming. + long_field_index = models.Index(fields=["pages"]) + long_field_index.set_name_with_model(Book) + self.assertEqual(long_field_index.name, "model_index_page_co_69235a_idx") + + # suffix can't be longer than 3 characters. + long_field_index.suffix = "suff" + msg = ( + "Index too long for multiple database support. Is self.suffix " + "longer than 3 characters?" 
+ ) + with self.assertRaisesMessage(ValueError, msg): + long_field_index.set_name_with_model(Book) + + @isolate_apps("model_indexes") + def test_name_auto_generation_with_quoted_db_table(self): + class QuotedDbTable(models.Model): + name = models.CharField(max_length=50) + + class Meta: + db_table = '"t_quoted"' + + index = models.Index(fields=["name"]) + index.set_name_with_model(QuotedDbTable) + self.assertEqual(index.name, "t_quoted_name_e4ed1b_idx") + + def test_deconstruction(self): + index = models.Index(fields=["title"], db_tablespace="idx_tbls") + index.set_name_with_model(Book) + path, args, kwargs = index.deconstruct() + self.assertEqual(path, "django.db.models.Index") + self.assertEqual(args, ()) + self.assertEqual( + kwargs, + { + "fields": ["title"], + "name": "model_index_title_196f42_idx", + "db_tablespace": "idx_tbls", + }, + ) + + def test_deconstruct_with_condition(self): + index = models.Index( + name="big_book_index", + fields=["title"], + condition=models.Q(pages__gt=400), + ) + index.set_name_with_model(Book) + path, args, kwargs = index.deconstruct() + self.assertEqual(path, "django.db.models.Index") + self.assertEqual(args, ()) + self.assertEqual( + kwargs, + { + "fields": ["title"], + "name": "model_index_title_196f42_idx", + "condition": models.Q(pages__gt=400), + }, + ) + + def test_deconstruct_with_include(self): + index = models.Index( + name="book_include_idx", + fields=["title"], + include=["author"], + ) + index.set_name_with_model(Book) + path, args, kwargs = index.deconstruct() + self.assertEqual(path, "django.db.models.Index") + self.assertEqual(args, ()) + self.assertEqual( + kwargs, + { + "fields": ["title"], + "name": "model_index_title_196f42_idx", + "include": ("author",), + }, + ) + + def test_deconstruct_with_expressions(self): + index = models.Index(Upper("title"), name="book_func_idx") + path, args, kwargs = index.deconstruct() + self.assertEqual(path, "django.db.models.Index") + self.assertEqual(args, (Upper("title"),)) 
+ self.assertEqual(kwargs, {"name": "book_func_idx"}) + + def test_clone(self): + index = models.Index(fields=["title"]) + new_index = index.clone() + self.assertIsNot(index, new_index) + self.assertEqual(index.fields, new_index.fields) + + def test_clone_with_expressions(self): + index = models.Index(Upper("title"), name="book_func_idx") + new_index = index.clone() + self.assertIsNot(index, new_index) + self.assertEqual(index.expressions, new_index.expressions) + + def test_name_set(self): + index_names = [index.name for index in Book._meta.indexes] + self.assertCountEqual( + index_names, + [ + "model_index_title_196f42_idx", + "model_index_isbn_34f975_idx", + "model_indexes_book_barcode_idx", + ], + ) + + def test_abstract_children(self): + index_names = [index.name for index in ChildModel1._meta.indexes] + self.assertEqual( + index_names, + ["model_index_name_440998_idx", "model_indexes_childmodel1_idx"], + ) + index_names = [index.name for index in ChildModel2._meta.indexes] + self.assertEqual( + index_names, + ["model_index_name_b6c374_idx", "model_indexes_childmodel2_idx"], + ) + + +@override_settings(DEFAULT_TABLESPACE=None) +class IndexesTests(TestCase): + @skipUnlessDBFeature("supports_tablespaces") + def test_db_tablespace(self): + editor = connection.schema_editor() + # Index with db_tablespace attribute. + for fields in [ + # Field with db_tablespace specified on model. + ["shortcut"], + # Field without db_tablespace specified on model. + ["author"], + # Multi-column with db_tablespaces specified on model. + ["shortcut", "isbn"], + # Multi-column without db_tablespace specified on model. + ["title", "author"], + ]: + with self.subTest(fields=fields): + index = models.Index(fields=fields, db_tablespace="idx_tbls2") + self.assertIn( + '"idx_tbls2"', str(index.create_sql(Book, editor)).lower() + ) + # Indexes without db_tablespace attribute. 
+ for fields in [["author"], ["shortcut", "isbn"], ["title", "author"]]: + with self.subTest(fields=fields): + index = models.Index(fields=fields) + # The DEFAULT_INDEX_TABLESPACE setting can't be tested because + # it's evaluated when the model class is defined. As a + # consequence, @override_settings doesn't work. + if settings.DEFAULT_INDEX_TABLESPACE: + self.assertIn( + '"%s"' % settings.DEFAULT_INDEX_TABLESPACE, + str(index.create_sql(Book, editor)).lower(), + ) + else: + self.assertNotIn("TABLESPACE", str(index.create_sql(Book, editor))) + # Field with db_tablespace specified on the model and an index without + # db_tablespace. + index = models.Index(fields=["shortcut"]) + self.assertIn('"idx_tbls"', str(index.create_sql(Book, editor)).lower()) + + @skipUnlessDBFeature("supports_tablespaces") + def test_func_with_tablespace(self): + # Functional index with db_tablespace attribute. + index = models.Index( + Lower("shortcut").desc(), + name="functional_tbls", + db_tablespace="idx_tbls2", + ) + with connection.schema_editor() as editor: + sql = str(index.create_sql(Book, editor)) + self.assertIn(editor.quote_name("idx_tbls2"), sql) + # Functional index without db_tablespace attribute. + index = models.Index(Lower("shortcut").desc(), name="functional_no_tbls") + with connection.schema_editor() as editor: + sql = str(index.create_sql(Book, editor)) + # The DEFAULT_INDEX_TABLESPACE setting can't be tested because it's + # evaluated when the model class is defined. As a consequence, + # @override_settings doesn't work. 
+ if settings.DEFAULT_INDEX_TABLESPACE: + self.assertIn( + editor.quote_name(settings.DEFAULT_INDEX_TABLESPACE), + sql, + ) + else: + self.assertNotIn("TABLESPACE", sql) diff --git a/testbed/django__django/tests/model_inheritance/__init__.py b/testbed/django__django/tests/model_inheritance/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_inheritance/models.py b/testbed/django__django/tests/model_inheritance/models.py new file mode 100644 index 0000000000000000000000000000000000000000..47aae186e03762c0915d22ee1a64c6ab7406213c --- /dev/null +++ b/testbed/django__django/tests/model_inheritance/models.py @@ -0,0 +1,214 @@ +""" +XX. Model inheritance + +Model inheritance exists in two varieties: + - abstract base classes which are a way of specifying common + information inherited by the subclasses. They don't exist as a separate + model. + - non-abstract base classes (the default), which are models in their own + right with their own database tables and everything. Their subclasses + have references back to them, created automatically. + +Both styles are demonstrated here. 
+""" +from django.db import models + +# +# Abstract base classes +# + + +class CommonInfo(models.Model): + name = models.CharField(max_length=50) + age = models.PositiveIntegerField() + + class Meta: + abstract = True + ordering = ["name"] + + def __str__(self): + return "%s %s" % (self.__class__.__name__, self.name) + + +class Worker(CommonInfo): + job = models.CharField(max_length=50) + + +class Student(CommonInfo): + school_class = models.CharField(max_length=10) + + class Meta: + pass + + +# +# Abstract base classes with related models +# + + +class Post(models.Model): + title = models.CharField(max_length=50) + + +class Attachment(models.Model): + post = models.ForeignKey( + Post, + models.CASCADE, + related_name="attached_%(class)s_set", + related_query_name="attached_%(app_label)s_%(class)ss", + ) + content = models.TextField() + + class Meta: + abstract = True + + +class Comment(Attachment): + is_spam = models.BooleanField(default=False) + + +class Link(Attachment): + url = models.URLField() + + +# +# Multi-table inheritance +# + + +class Chef(models.Model): + name = models.CharField(max_length=50) + + +class Place(models.Model): + name = models.CharField(max_length=50) + address = models.CharField(max_length=80) + + +class Rating(models.Model): + rating = models.IntegerField(null=True, blank=True) + + class Meta: + abstract = True + ordering = ["-rating"] + + +class Restaurant(Place, Rating): + serves_hot_dogs = models.BooleanField(default=False) + serves_pizza = models.BooleanField(default=False) + chef = models.ForeignKey(Chef, models.SET_NULL, null=True, blank=True) + + class Meta(Rating.Meta): + db_table = "my_restaurant" + + +class ItalianRestaurant(Restaurant): + serves_gnocchi = models.BooleanField(default=False) + + +class ItalianRestaurantCommonParent(ItalianRestaurant, Place): + place_ptr_two = models.OneToOneField( + Place, on_delete=models.CASCADE, parent_link=True + ) + + +class Supplier(Place): + customers = models.ManyToManyField(Restaurant, 
related_name="provider") + + +class CustomSupplier(Supplier): + pass + + +class ParkingLot(Place): + # An explicit link to the parent (we can control the attribute name). + parent = models.OneToOneField( + Place, models.CASCADE, primary_key=True, parent_link=True + ) + main_site = models.ForeignKey(Place, models.CASCADE, related_name="lot") + + +# +# Abstract base classes with related models where the sub-class has the +# same name in a different app and inherits from the same abstract base +# class. +# NOTE: The actual API tests for the following classes are in +# model_inheritance_same_model_name/models.py - They are defined +# here in order to have the name conflict between apps +# + + +class Title(models.Model): + title = models.CharField(max_length=50) + + +class NamedURL(models.Model): + title = models.ForeignKey( + Title, models.CASCADE, related_name="attached_%(app_label)s_%(class)s_set" + ) + url = models.URLField() + + class Meta: + abstract = True + + +class Mixin: + def __init__(self): + self.other_attr = 1 + super().__init__() + + +class MixinModel(models.Model, Mixin): + pass + + +class Base(models.Model): + titles = models.ManyToManyField(Title) + + +class SubBase(Base): + sub_id = models.IntegerField(primary_key=True) + + +class GrandParent(models.Model): + first_name = models.CharField(max_length=80) + last_name = models.CharField(max_length=80) + email = models.EmailField(unique=True) + place = models.ForeignKey(Place, models.CASCADE, null=True, related_name="+") + + class Meta: + # Ordering used by test_inherited_ordering_pk_desc. 
+ ordering = ["-pk"] + unique_together = ("first_name", "last_name") + + +class Parent(GrandParent): + pass + + +class Child(Parent): + pass + + +class GrandChild(Child): + pass + + +class CommonAncestor(models.Model): + id = models.IntegerField(primary_key=True, default=1) + + +class FirstParent(CommonAncestor): + first_ancestor = models.OneToOneField( + CommonAncestor, models.CASCADE, primary_key=True, parent_link=True + ) + + +class SecondParent(CommonAncestor): + second_ancestor = models.OneToOneField( + CommonAncestor, models.CASCADE, primary_key=True, parent_link=True + ) + + +class CommonChild(FirstParent, SecondParent): + pass diff --git a/testbed/django__django/tests/model_inheritance/test_abstract_inheritance.py b/testbed/django__django/tests/model_inheritance/test_abstract_inheritance.py new file mode 100644 index 0000000000000000000000000000000000000000..24362292a1df1ddbd39e2d279bcbe39e8518a282 --- /dev/null +++ b/testbed/django__django/tests/model_inheritance/test_abstract_inheritance.py @@ -0,0 +1,449 @@ +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation +from django.contrib.contenttypes.models import ContentType +from django.core.checks import Error +from django.core.exceptions import FieldDoesNotExist, FieldError +from django.db import models +from django.test import SimpleTestCase +from django.test.utils import isolate_apps + + +@isolate_apps("model_inheritance") +class AbstractInheritanceTests(SimpleTestCase): + def test_single_parent(self): + class AbstractBase(models.Model): + name = models.CharField(max_length=30) + + class Meta: + abstract = True + + class AbstractDescendant(AbstractBase): + name = models.CharField(max_length=50) + + class Meta: + abstract = True + + class DerivedChild(AbstractBase): + name = models.CharField(max_length=50) + + class DerivedGrandChild(AbstractDescendant): + pass + + self.assertEqual(AbstractDescendant._meta.get_field("name").max_length, 50) + 
self.assertEqual(DerivedChild._meta.get_field("name").max_length, 50) + self.assertEqual(DerivedGrandChild._meta.get_field("name").max_length, 50) + + def test_multiple_inheritance_allows_inherited_field(self): + """ + Single layer multiple inheritance is as expected, deriving the + inherited field from the first base. + """ + + class ParentA(models.Model): + name = models.CharField(max_length=255) + + class Meta: + abstract = True + + class ParentB(models.Model): + name = models.IntegerField() + + class Meta: + abstract = True + + class Child(ParentA, ParentB): + pass + + self.assertEqual(Child.check(), []) + inherited_field = Child._meta.get_field("name") + self.assertIsInstance(inherited_field, models.CharField) + self.assertEqual(inherited_field.max_length, 255) + + def test_diamond_shaped_multiple_inheritance_is_depth_first(self): + """ + In contrast to standard Python MRO, resolution of inherited fields is + strictly depth-first, rather than breadth-first in diamond-shaped cases. + + This is because a copy of the parent field descriptor is placed onto + the model class in ModelBase.__new__(), rather than the attribute + lookup going via bases. (It only **looks** like inheritance.) + + Here, Child inherits name from Root, rather than ParentB. 
+ """ + + class Root(models.Model): + name = models.CharField(max_length=255) + + class Meta: + abstract = True + + class ParentA(Root): + class Meta: + abstract = True + + class ParentB(Root): + name = models.IntegerField() + + class Meta: + abstract = True + + class Child(ParentA, ParentB): + pass + + self.assertEqual(Child.check(), []) + inherited_field = Child._meta.get_field("name") + self.assertIsInstance(inherited_field, models.CharField) + self.assertEqual(inherited_field.max_length, 255) + + def test_target_field_may_be_pushed_down(self): + """ + Where the Child model needs to inherit a field from a different base + than that given by depth-first resolution, the target field can be + **pushed down** by being re-declared. + """ + + class Root(models.Model): + name = models.CharField(max_length=255) + + class Meta: + abstract = True + + class ParentA(Root): + class Meta: + abstract = True + + class ParentB(Root): + name = models.IntegerField() + + class Meta: + abstract = True + + class Child(ParentA, ParentB): + name = models.IntegerField() + + self.assertEqual(Child.check(), []) + inherited_field = Child._meta.get_field("name") + self.assertIsInstance(inherited_field, models.IntegerField) + + def test_multiple_inheritance_cannot_shadow_concrete_inherited_field(self): + class ConcreteParent(models.Model): + name = models.CharField(max_length=255) + + class AbstractParent(models.Model): + name = models.IntegerField() + + class Meta: + abstract = True + + class FirstChild(ConcreteParent, AbstractParent): + pass + + class AnotherChild(AbstractParent, ConcreteParent): + pass + + self.assertIsInstance(FirstChild._meta.get_field("name"), models.CharField) + self.assertEqual( + AnotherChild.check(), + [ + Error( + "The field 'name' clashes with the field 'name' " + "from model 'model_inheritance.concreteparent'.", + obj=AnotherChild._meta.get_field("name"), + id="models.E006", + ) + ], + ) + + def test_virtual_field(self): + class RelationModel(models.Model): + 
content_type = models.ForeignKey(ContentType, models.CASCADE) + object_id = models.PositiveIntegerField() + content_object = GenericForeignKey("content_type", "object_id") + + class RelatedModelAbstract(models.Model): + field = GenericRelation(RelationModel) + + class Meta: + abstract = True + + class ModelAbstract(models.Model): + field = models.CharField(max_length=100) + + class Meta: + abstract = True + + class OverrideRelatedModelAbstract(RelatedModelAbstract): + field = models.CharField(max_length=100) + + class ExtendModelAbstract(ModelAbstract): + field = GenericRelation(RelationModel) + + self.assertIsInstance( + OverrideRelatedModelAbstract._meta.get_field("field"), models.CharField + ) + self.assertIsInstance( + ExtendModelAbstract._meta.get_field("field"), GenericRelation + ) + + def test_cannot_override_indirect_abstract_field(self): + class AbstractBase(models.Model): + name = models.CharField(max_length=30) + + class Meta: + abstract = True + + class ConcreteDescendant(AbstractBase): + pass + + msg = ( + "Local field 'name' in class 'Descendant' clashes with field of " + "the same name from base class 'ConcreteDescendant'." 
+ ) + with self.assertRaisesMessage(FieldError, msg): + + class Descendant(ConcreteDescendant): + name = models.IntegerField() + + def test_override_field_with_attr(self): + class AbstractBase(models.Model): + first_name = models.CharField(max_length=50) + last_name = models.CharField(max_length=50) + middle_name = models.CharField(max_length=30) + full_name = models.CharField(max_length=150) + + class Meta: + abstract = True + + class Descendant(AbstractBase): + middle_name = None + + def full_name(self): + return self.first_name + self.last_name + + msg = "Descendant has no field named %r" + with self.assertRaisesMessage(FieldDoesNotExist, msg % "middle_name"): + Descendant._meta.get_field("middle_name") + + with self.assertRaisesMessage(FieldDoesNotExist, msg % "full_name"): + Descendant._meta.get_field("full_name") + + def test_overriding_field_removed_by_concrete_model(self): + class AbstractModel(models.Model): + foo = models.CharField(max_length=30) + + class Meta: + abstract = True + + class RemovedAbstractModelField(AbstractModel): + foo = None + + class OverrideRemovedFieldByConcreteModel(RemovedAbstractModelField): + foo = models.CharField(max_length=50) + + self.assertEqual( + OverrideRemovedFieldByConcreteModel._meta.get_field("foo").max_length, 50 + ) + + def test_shadowed_fkey_id(self): + class Foo(models.Model): + pass + + class AbstractBase(models.Model): + foo = models.ForeignKey(Foo, models.CASCADE) + + class Meta: + abstract = True + + class Descendant(AbstractBase): + foo_id = models.IntegerField() + + self.assertEqual( + Descendant.check(), + [ + Error( + "The field 'foo_id' clashes with the field 'foo' " + "from model 'model_inheritance.descendant'.", + obj=Descendant._meta.get_field("foo_id"), + id="models.E006", + ) + ], + ) + + def test_shadow_related_name_when_set_to_none(self): + class AbstractBase(models.Model): + bar = models.IntegerField() + + class Meta: + abstract = True + + class Foo(AbstractBase): + bar = None + foo = 
models.IntegerField() + + class Bar(models.Model): + bar = models.ForeignKey(Foo, models.CASCADE, related_name="bar") + + self.assertEqual(Bar.check(), []) + + def test_reverse_foreign_key(self): + class AbstractBase(models.Model): + foo = models.CharField(max_length=100) + + class Meta: + abstract = True + + class Descendant(AbstractBase): + pass + + class Foo(models.Model): + foo = models.ForeignKey(Descendant, models.CASCADE, related_name="foo") + + self.assertEqual( + Foo._meta.get_field("foo").check(), + [ + Error( + "Reverse accessor 'Descendant.foo' for " + "'model_inheritance.Foo.foo' clashes with field name " + "'model_inheritance.Descendant.foo'.", + hint=( + "Rename field 'model_inheritance.Descendant.foo', or " + "add/change a related_name argument to the definition " + "for field 'model_inheritance.Foo.foo'." + ), + obj=Foo._meta.get_field("foo"), + id="fields.E302", + ), + Error( + "Reverse query name for 'model_inheritance.Foo.foo' " + "clashes with field name " + "'model_inheritance.Descendant.foo'.", + hint=( + "Rename field 'model_inheritance.Descendant.foo', or " + "add/change a related_name argument to the definition " + "for field 'model_inheritance.Foo.foo'." 
+ ), + obj=Foo._meta.get_field("foo"), + id="fields.E303", + ), + ], + ) + + def test_multi_inheritance_field_clashes(self): + class AbstractBase(models.Model): + name = models.CharField(max_length=30) + + class Meta: + abstract = True + + class ConcreteBase(AbstractBase): + pass + + class AbstractDescendant(ConcreteBase): + class Meta: + abstract = True + + class ConcreteDescendant(AbstractDescendant): + name = models.CharField(max_length=100) + + self.assertEqual( + ConcreteDescendant.check(), + [ + Error( + "The field 'name' clashes with the field 'name' from " + "model 'model_inheritance.concretebase'.", + obj=ConcreteDescendant._meta.get_field("name"), + id="models.E006", + ) + ], + ) + + def test_override_one2one_relation_auto_field_clashes(self): + class ConcreteParent(models.Model): + name = models.CharField(max_length=255) + + class AbstractParent(models.Model): + name = models.IntegerField() + + class Meta: + abstract = True + + msg = ( + "Auto-generated field 'concreteparent_ptr' in class 'Descendant' " + "for parent_link to base class 'ConcreteParent' clashes with " + "declared field of the same name." 
+ ) + with self.assertRaisesMessage(FieldError, msg): + + class Descendant(ConcreteParent, AbstractParent): + concreteparent_ptr = models.CharField(max_length=30) + + def test_abstract_model_with_regular_python_mixin_mro(self): + class AbstractModel(models.Model): + name = models.CharField(max_length=255) + age = models.IntegerField() + + class Meta: + abstract = True + + class Mixin: + age = None + + class Mixin2: + age = 2 + + class DescendantMixin(Mixin): + pass + + class ConcreteModel(models.Model): + foo = models.IntegerField() + + class ConcreteModel2(ConcreteModel): + age = models.SmallIntegerField() + + def fields(model): + if not hasattr(model, "_meta"): + return [] + return [(f.name, f.__class__) for f in model._meta.get_fields()] + + model_dict = {"__module__": "model_inheritance"} + model1 = type("Model1", (AbstractModel, Mixin), model_dict.copy()) + model2 = type("Model2", (Mixin2, AbstractModel), model_dict.copy()) + model3 = type("Model3", (DescendantMixin, AbstractModel), model_dict.copy()) + model4 = type("Model4", (Mixin2, Mixin, AbstractModel), model_dict.copy()) + model5 = type( + "Model5", (Mixin2, ConcreteModel2, Mixin, AbstractModel), model_dict.copy() + ) + + self.assertEqual( + fields(model1), + [ + ("id", models.AutoField), + ("name", models.CharField), + ("age", models.IntegerField), + ], + ) + + self.assertEqual( + fields(model2), [("id", models.AutoField), ("name", models.CharField)] + ) + self.assertEqual(getattr(model2, "age"), 2) + + self.assertEqual( + fields(model3), [("id", models.AutoField), ("name", models.CharField)] + ) + + self.assertEqual( + fields(model4), [("id", models.AutoField), ("name", models.CharField)] + ) + self.assertEqual(getattr(model4, "age"), 2) + + self.assertEqual( + fields(model5), + [ + ("id", models.AutoField), + ("foo", models.IntegerField), + ("concretemodel_ptr", models.OneToOneField), + ("age", models.SmallIntegerField), + ("concretemodel2_ptr", models.OneToOneField), + ("name", models.CharField), + 
], + ) diff --git a/testbed/django__django/tests/model_inheritance/tests.py b/testbed/django__django/tests/model_inheritance/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..5bf1ef5db5437aa995e5ab64976466fd8f15e87e --- /dev/null +++ b/testbed/django__django/tests/model_inheritance/tests.py @@ -0,0 +1,670 @@ +from operator import attrgetter + +from django.core.exceptions import FieldError, ValidationError +from django.db import connection, models +from django.db.models.query_utils import DeferredAttribute +from django.test import SimpleTestCase, TestCase +from django.test.utils import CaptureQueriesContext, isolate_apps + +from .models import ( + Base, + Chef, + CommonChild, + CommonInfo, + CustomSupplier, + GrandChild, + GrandParent, + ItalianRestaurant, + ItalianRestaurantCommonParent, + MixinModel, + Parent, + ParkingLot, + Place, + Post, + Restaurant, + Student, + SubBase, + Supplier, + Title, + Worker, +) + + +class ModelInheritanceTests(TestCase): + def test_abstract(self): + # The Student and Worker models both have 'name' and 'age' fields on + # them and inherit the __str__() method, just as with normal Python + # subclassing. This is useful if you want to factor out common + # information for programming purposes, but still completely + # independent separate models at the database level. + w1 = Worker.objects.create(name="Fred", age=35, job="Quarry worker") + Worker.objects.create(name="Barney", age=34, job="Quarry worker") + + s = Student.objects.create(name="Pebbles", age=5, school_class="1B") + + self.assertEqual(str(w1), "Worker Fred") + self.assertEqual(str(s), "Student Pebbles") + + # The children inherit the Meta class of their parents (if they don't + # specify their own). + self.assertSequenceEqual( + Worker.objects.values("name"), + [ + {"name": "Barney"}, + {"name": "Fred"}, + ], + ) + + # Since Student does not subclass CommonInfo's Meta, it has the effect + # of completely overriding it. 
So ordering by name doesn't take place + # for Students. + self.assertEqual(Student._meta.ordering, []) + + # However, the CommonInfo class cannot be used as a normal model (it + # doesn't exist as a model). + with self.assertRaisesMessage( + AttributeError, "'CommonInfo' has no attribute 'objects'" + ): + CommonInfo.objects.all() + + def test_reverse_relation_for_different_hierarchy_tree(self): + # Even though p.supplier for a Place 'p' (a parent of a Supplier), a + # Restaurant object cannot access that reverse relation, since it's not + # part of the Place-Supplier Hierarchy. + self.assertSequenceEqual(Place.objects.filter(supplier__name="foo"), []) + msg = ( + "Cannot resolve keyword 'supplier' into field. Choices are: " + "address, chef, chef_id, id, italianrestaurant, lot, name, " + "place_ptr, place_ptr_id, provider, rating, serves_hot_dogs, serves_pizza" + ) + with self.assertRaisesMessage(FieldError, msg): + Restaurant.objects.filter(supplier__name="foo") + + def test_model_with_distinct_accessors(self): + # The Post model has distinct accessors for the Comment and Link models. + post = Post.objects.create(title="Lorem Ipsum") + post.attached_comment_set.create(content="Save $ on V1agr@", is_spam=True) + post.attached_link_set.create( + content="The web framework for perfections with deadlines.", + url="http://www.djangoproject.com/", + ) + + # The Post model doesn't have an attribute called + # 'attached_%(class)s_set'. + msg = "'Post' object has no attribute 'attached_%(class)s_set'" + with self.assertRaisesMessage(AttributeError, msg): + getattr(post, "attached_%(class)s_set") + + def test_model_with_distinct_related_query_name(self): + self.assertSequenceEqual( + Post.objects.filter(attached_model_inheritance_comments__is_spam=True), [] + ) + + # The Post model doesn't have a related query accessor based on + # related_name (attached_comment_set). + msg = "Cannot resolve keyword 'attached_comment_set' into field." 
+ with self.assertRaisesMessage(FieldError, msg): + Post.objects.filter(attached_comment_set__is_spam=True) + + def test_meta_fields_and_ordering(self): + # Make sure Restaurant and ItalianRestaurant have the right fields in + # the right order. + self.assertEqual( + [f.name for f in Restaurant._meta.fields], + [ + "id", + "name", + "address", + "place_ptr", + "rating", + "serves_hot_dogs", + "serves_pizza", + "chef", + ], + ) + self.assertEqual( + [f.name for f in ItalianRestaurant._meta.fields], + [ + "id", + "name", + "address", + "place_ptr", + "rating", + "serves_hot_dogs", + "serves_pizza", + "chef", + "restaurant_ptr", + "serves_gnocchi", + ], + ) + self.assertEqual(Restaurant._meta.ordering, ["-rating"]) + + def test_custompk_m2m(self): + b = Base.objects.create() + b.titles.add(Title.objects.create(title="foof")) + s = SubBase.objects.create(sub_id=b.id) + b = Base.objects.get(pk=s.id) + self.assertNotEqual(b.pk, s.pk) + # Low-level test for related_val + self.assertEqual(s.titles.related_val, (s.id,)) + # Higher level test for correct query values (title foof not + # accidentally found). + self.assertSequenceEqual(s.titles.all(), []) + + def test_create_diamond_mti_default_pk(self): + # 1 INSERT for each base. + with self.assertNumQueries(4): + common_child = CommonChild.objects.create() + # 3 SELECTs for the parents, 1 UPDATE for the child. + with self.assertNumQueries(4): + common_child.save() + + def test_create_diamond_mti_common_parent(self): + with self.assertNumQueries(4): + italian_restaurant_child = ItalianRestaurantCommonParent.objects.create( + name="Ristorante Miron", + address="1234 W. 
Ash", + ) + + self.assertEqual( + italian_restaurant_child.italianrestaurant_ptr.place_ptr, + italian_restaurant_child.place_ptr_two, + ) + self.assertEqual( + italian_restaurant_child.italianrestaurant_ptr.restaurant_ptr, + italian_restaurant_child.restaurant_ptr, + ) + self.assertEqual( + italian_restaurant_child.restaurant_ptr.place_ptr, + italian_restaurant_child.place_ptr_two, + ) + self.assertEqual(italian_restaurant_child.name, "Ristorante Miron") + self.assertEqual(italian_restaurant_child.address, "1234 W. Ash") + + def test_update_parent_filtering(self): + """ + Updating a field of a model subclass doesn't issue an UPDATE + query constrained by an inner query (#10399). + """ + supplier = Supplier.objects.create( + name="Central market", + address="610 some street", + ) + # Capture the expected query in a database agnostic way + with CaptureQueriesContext(connection) as captured_queries: + Place.objects.filter(pk=supplier.pk).update(name=supplier.name) + expected_sql = captured_queries[0]["sql"] + # Capture the queries executed when a subclassed model instance is saved. 
+ with CaptureQueriesContext(connection) as captured_queries: + supplier.save(update_fields=("name",)) + for query in captured_queries: + sql = query["sql"] + if "UPDATE" in sql: + self.assertEqual(expected_sql, sql) + + def test_create_child_no_update(self): + """Creating a child with non-abstract parents only issues INSERTs.""" + + def a(): + GrandChild.objects.create( + email="grand_parent@example.com", + first_name="grand", + last_name="parent", + ) + + def b(): + GrandChild().save() + + for i, test in enumerate([a, b]): + with self.subTest(i=i), self.assertNumQueries(4), CaptureQueriesContext( + connection + ) as queries: + test() + for query in queries: + sql = query["sql"] + self.assertIn("INSERT INTO", sql, sql) + + def test_create_copy_with_inherited_m2m(self): + restaurant = Restaurant.objects.create() + supplier = CustomSupplier.objects.create( + name="Central market", address="944 W. Fullerton" + ) + supplier.customers.set([restaurant]) + old_customers = supplier.customers.all() + supplier.pk = None + supplier.id = None + supplier._state.adding = True + supplier.save() + supplier.customers.set(old_customers) + supplier = Supplier.objects.get(pk=supplier.pk) + self.assertCountEqual(supplier.customers.all(), old_customers) + self.assertSequenceEqual(supplier.customers.all(), [restaurant]) + + def test_eq(self): + # Equality doesn't transfer in multitable inheritance. 
+ self.assertNotEqual(Place(id=1), Restaurant(id=1)) + self.assertNotEqual(Restaurant(id=1), Place(id=1)) + + def test_mixin_init(self): + m = MixinModel() + self.assertEqual(m.other_attr, 1) + + @isolate_apps("model_inheritance") + def test_abstract_parent_link(self): + class A(models.Model): + pass + + class B(A): + a = models.OneToOneField("A", parent_link=True, on_delete=models.CASCADE) + + class Meta: + abstract = True + + class C(B): + pass + + self.assertIs(C._meta.parents[A], C._meta.get_field("a")) + + @isolate_apps("model_inheritance") + def test_init_subclass(self): + saved_kwargs = {} + + class A(models.Model): + def __init_subclass__(cls, **kwargs): + super().__init_subclass__() + saved_kwargs.update(kwargs) + + kwargs = {"x": 1, "y": 2, "z": 3} + + class B(A, **kwargs): + pass + + self.assertEqual(saved_kwargs, kwargs) + + @isolate_apps("model_inheritance") + def test_set_name(self): + class ClassAttr: + called = None + + def __set_name__(self_, owner, name): + self.assertIsNone(self_.called) + self_.called = (owner, name) + + class A(models.Model): + attr = ClassAttr() + + self.assertEqual(A.attr.called, (A, "attr")) + + def test_inherited_ordering_pk_desc(self): + p1 = Parent.objects.create(first_name="Joe", email="joe@email.com") + p2 = Parent.objects.create(first_name="Jon", email="jon@email.com") + expected_order_by_sql = "ORDER BY %s.%s DESC" % ( + connection.ops.quote_name(Parent._meta.db_table), + connection.ops.quote_name(Parent._meta.get_field("grandparent_ptr").column), + ) + qs = Parent.objects.all() + self.assertSequenceEqual(qs, [p2, p1]) + self.assertIn(expected_order_by_sql, str(qs.query)) + + def test_queryset_class_getitem(self): + self.assertIs(models.QuerySet[Post], models.QuerySet) + self.assertIs(models.QuerySet[Post, Post], models.QuerySet) + self.assertIs(models.QuerySet[Post, int, str], models.QuerySet) + + def test_shadow_parent_attribute_with_field(self): + class ScalarParent(models.Model): + foo = 1 + + class 
ScalarOverride(ScalarParent): + foo = models.IntegerField() + + self.assertEqual(type(ScalarOverride.foo), DeferredAttribute) + + def test_shadow_parent_property_with_field(self): + class PropertyParent(models.Model): + @property + def foo(self): + pass + + class PropertyOverride(PropertyParent): + foo = models.IntegerField() + + self.assertEqual(type(PropertyOverride.foo), DeferredAttribute) + + def test_shadow_parent_method_with_field(self): + class MethodParent(models.Model): + def foo(self): + pass + + class MethodOverride(MethodParent): + foo = models.IntegerField() + + self.assertEqual(type(MethodOverride.foo), DeferredAttribute) + + +class ModelInheritanceDataTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.restaurant = Restaurant.objects.create( + name="Demon Dogs", + address="944 W. Fullerton", + serves_hot_dogs=True, + serves_pizza=False, + rating=2, + ) + + chef = Chef.objects.create(name="Albert") + cls.italian_restaurant = ItalianRestaurant.objects.create( + name="Ristorante Miron", + address="1234 W. Ash", + serves_hot_dogs=False, + serves_pizza=False, + serves_gnocchi=True, + rating=4, + chef=chef, + ) + + def test_filter_inherited_model(self): + self.assertQuerySetEqual( + ItalianRestaurant.objects.filter(address="1234 W. Ash"), + [ + "Ristorante Miron", + ], + attrgetter("name"), + ) + + def test_update_inherited_model(self): + self.italian_restaurant.address = "1234 W. Elm" + self.italian_restaurant.save() + self.assertQuerySetEqual( + ItalianRestaurant.objects.filter(address="1234 W. Elm"), + [ + "Ristorante Miron", + ], + attrgetter("name"), + ) + + def test_parent_fields_available_for_filtering_in_child_model(self): + # Parent fields can be used directly in filters on the child model. + self.assertQuerySetEqual( + Restaurant.objects.filter(name="Demon Dogs"), + [ + "Demon Dogs", + ], + attrgetter("name"), + ) + self.assertQuerySetEqual( + ItalianRestaurant.objects.filter(address="1234 W. 
Ash"), + [ + "Ristorante Miron", + ], + attrgetter("name"), + ) + + def test_filter_on_parent_returns_object_of_parent_type(self): + # Filters against the parent model return objects of the parent's type. + p = Place.objects.get(name="Demon Dogs") + self.assertIs(type(p), Place) + + def test_parent_child_one_to_one_link(self): + # Since the parent and child are linked by an automatically created + # OneToOneField, you can get from the parent to the child by using the + # child's name. + self.assertEqual( + Place.objects.get(name="Demon Dogs").restaurant, + Restaurant.objects.get(name="Demon Dogs"), + ) + self.assertEqual( + Place.objects.get(name="Ristorante Miron").restaurant.italianrestaurant, + ItalianRestaurant.objects.get(name="Ristorante Miron"), + ) + self.assertEqual( + Restaurant.objects.get(name="Ristorante Miron").italianrestaurant, + ItalianRestaurant.objects.get(name="Ristorante Miron"), + ) + + def test_parent_child_one_to_one_link_on_nonrelated_objects(self): + # This won't work because the Demon Dogs restaurant is not an Italian + # restaurant. + with self.assertRaises(ItalianRestaurant.DoesNotExist): + Place.objects.get(name="Demon Dogs").restaurant.italianrestaurant + + def test_inherited_does_not_exist_exception(self): + # An ItalianRestaurant which does not exist is also a Place which does + # not exist. + with self.assertRaises(Place.DoesNotExist): + ItalianRestaurant.objects.get(name="The Noodle Void") + + def test_inherited_multiple_objects_returned_exception(self): + # MultipleObjectsReturned is also inherited. + with self.assertRaises(Place.MultipleObjectsReturned): + Restaurant.objects.get() + + def test_related_objects_for_inherited_models(self): + # Related objects work just as they normally do. 
+ s1 = Supplier.objects.create(name="Joe's Chickens", address="123 Sesame St") + s1.customers.set([self.restaurant, self.italian_restaurant]) + s2 = Supplier.objects.create(name="Luigi's Pasta", address="456 Sesame St") + s2.customers.set([self.italian_restaurant]) + + # This won't work because the Place we select is not a Restaurant (it's + # a Supplier). + p = Place.objects.get(name="Joe's Chickens") + with self.assertRaises(Restaurant.DoesNotExist): + p.restaurant + + self.assertEqual(p.supplier, s1) + self.assertQuerySetEqual( + self.italian_restaurant.provider.order_by("-name"), + ["Luigi's Pasta", "Joe's Chickens"], + attrgetter("name"), + ) + self.assertQuerySetEqual( + Restaurant.objects.filter(provider__name__contains="Chickens"), + [ + "Ristorante Miron", + "Demon Dogs", + ], + attrgetter("name"), + ) + self.assertQuerySetEqual( + ItalianRestaurant.objects.filter(provider__name__contains="Chickens"), + [ + "Ristorante Miron", + ], + attrgetter("name"), + ) + + ParkingLot.objects.create(name="Main St", address="111 Main St", main_site=s1) + ParkingLot.objects.create( + name="Well Lit", address="124 Sesame St", main_site=self.italian_restaurant + ) + + self.assertEqual( + Restaurant.objects.get(lot__name="Well Lit").name, "Ristorante Miron" + ) + + def test_update_works_on_parent_and_child_models_at_once(self): + # The update() command can update fields in parent and child classes at + # once (although it executed multiple SQL queries to do so). + rows = Restaurant.objects.filter( + serves_hot_dogs=True, name__contains="D" + ).update(name="Demon Puppies", serves_hot_dogs=False) + self.assertEqual(rows, 1) + + r1 = Restaurant.objects.get(pk=self.restaurant.pk) + self.assertFalse(r1.serves_hot_dogs) + self.assertEqual(r1.name, "Demon Puppies") + + def test_values_works_on_parent_model_fields(self): + # The values() command also works on fields from parent models. 
+ self.assertSequenceEqual( + ItalianRestaurant.objects.values("name", "rating"), + [ + {"rating": 4, "name": "Ristorante Miron"}, + ], + ) + + def test_select_related_works_on_parent_model_fields(self): + # select_related works with fields from the parent object as if they + # were a normal part of the model. + self.assertNumQueries(2, lambda: ItalianRestaurant.objects.all()[0].chef) + self.assertNumQueries( + 1, lambda: ItalianRestaurant.objects.select_related("chef")[0].chef + ) + + def test_select_related_defer(self): + """ + #23370 - Should be able to defer child fields when using + select_related() from parent to child. + """ + qs = ( + Restaurant.objects.select_related("italianrestaurant") + .defer("italianrestaurant__serves_gnocchi") + .order_by("rating") + ) + + # The field was actually deferred + with self.assertNumQueries(2): + objs = list(qs.all()) + self.assertTrue(objs[1].italianrestaurant.serves_gnocchi) + + # Model fields where assigned correct values + self.assertEqual(qs[0].name, "Demon Dogs") + self.assertEqual(qs[0].rating, 2) + self.assertEqual(qs[1].italianrestaurant.name, "Ristorante Miron") + self.assertEqual(qs[1].italianrestaurant.rating, 4) + + def test_parent_cache_reuse(self): + place = Place.objects.create() + GrandChild.objects.create(place=place) + grand_parent = GrandParent.objects.latest("pk") + with self.assertNumQueries(1): + self.assertEqual(grand_parent.place, place) + parent = grand_parent.parent + with self.assertNumQueries(0): + self.assertEqual(parent.place, place) + child = parent.child + with self.assertNumQueries(0): + self.assertEqual(child.place, place) + grandchild = child.grandchild + with self.assertNumQueries(0): + self.assertEqual(grandchild.place, place) + + def test_update_query_counts(self): + """ + Update queries do not generate unnecessary queries (#18304). 
+ """ + with self.assertNumQueries(3): + self.italian_restaurant.save() + + def test_filter_inherited_on_null(self): + # Refs #12567 + Supplier.objects.create( + name="Central market", + address="610 some street", + ) + self.assertQuerySetEqual( + Place.objects.filter(supplier__isnull=False), + [ + "Central market", + ], + attrgetter("name"), + ) + self.assertQuerySetEqual( + Place.objects.filter(supplier__isnull=True).order_by("name"), + [ + "Demon Dogs", + "Ristorante Miron", + ], + attrgetter("name"), + ) + + def test_exclude_inherited_on_null(self): + # Refs #12567 + Supplier.objects.create( + name="Central market", + address="610 some street", + ) + self.assertQuerySetEqual( + Place.objects.exclude(supplier__isnull=False).order_by("name"), + [ + "Demon Dogs", + "Ristorante Miron", + ], + attrgetter("name"), + ) + self.assertQuerySetEqual( + Place.objects.exclude(supplier__isnull=True), + [ + "Central market", + ], + attrgetter("name"), + ) + + +@isolate_apps("model_inheritance", "model_inheritance.tests") +class InheritanceSameModelNameTests(SimpleTestCase): + def test_abstract_fk_related_name(self): + related_name = "%(app_label)s_%(class)s_references" + + class Referenced(models.Model): + class Meta: + app_label = "model_inheritance" + + class AbstractReferent(models.Model): + reference = models.ForeignKey( + Referenced, models.CASCADE, related_name=related_name + ) + + class Meta: + app_label = "model_inheritance" + abstract = True + + class Referent(AbstractReferent): + class Meta: + app_label = "model_inheritance" + + LocalReferent = Referent + + class Referent(AbstractReferent): + class Meta: + app_label = "tests" + + ForeignReferent = Referent + + self.assertFalse(hasattr(Referenced, related_name)) + self.assertIs( + Referenced.model_inheritance_referent_references.field.model, LocalReferent + ) + self.assertIs(Referenced.tests_referent_references.field.model, ForeignReferent) + + +class InheritanceUniqueTests(TestCase): + @classmethod + def 
setUpTestData(cls): + cls.grand_parent = GrandParent.objects.create( + email="grand_parent@example.com", + first_name="grand", + last_name="parent", + ) + + def test_unique(self): + grand_child = GrandChild( + email=self.grand_parent.email, + first_name="grand", + last_name="child", + ) + msg = "Grand parent with this Email already exists." + with self.assertRaisesMessage(ValidationError, msg): + grand_child.validate_unique() + + def test_unique_together(self): + grand_child = GrandChild( + email="grand_child@example.com", + first_name=self.grand_parent.first_name, + last_name=self.grand_parent.last_name, + ) + msg = "Grand parent with this First name and Last name already exists." + with self.assertRaisesMessage(ValidationError, msg): + grand_child.validate_unique() diff --git a/testbed/django__django/tests/model_inheritance_regress/__init__.py b/testbed/django__django/tests/model_inheritance_regress/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_inheritance_regress/models.py b/testbed/django__django/tests/model_inheritance_regress/models.py new file mode 100644 index 0000000000000000000000000000000000000000..11886bb48d605da652c593f792f3c8ddb8881c53 --- /dev/null +++ b/testbed/django__django/tests/model_inheritance_regress/models.py @@ -0,0 +1,207 @@ +import datetime + +from django.db import models + + +class Place(models.Model): + name = models.CharField(max_length=50) + address = models.CharField(max_length=80) + + class Meta: + ordering = ("name",) + + +class Restaurant(Place): + serves_hot_dogs = models.BooleanField(default=False) + serves_pizza = models.BooleanField(default=False) + + +class ItalianRestaurant(Restaurant): + serves_gnocchi = models.BooleanField(default=False) + + +class ParkingLot(Place): + # An explicit link to the parent (we can control the attribute name). 
+ parent = models.OneToOneField( + Place, models.CASCADE, primary_key=True, parent_link=True + ) + capacity = models.IntegerField() + + +class ParkingLot3(Place): + # The parent_link connector need not be the pk on the model. + primary_key = models.AutoField(primary_key=True) + parent = models.OneToOneField(Place, models.CASCADE, parent_link=True) + + +class ParkingLot4(models.Model): + # Test parent_link connector can be discovered in abstract classes. + parent = models.OneToOneField(Place, models.CASCADE, parent_link=True) + + class Meta: + abstract = True + + +class ParkingLot4A(ParkingLot4, Place): + pass + + +class ParkingLot4B(Place, ParkingLot4): + pass + + +class Supplier(models.Model): + name = models.CharField(max_length=50) + restaurant = models.ForeignKey(Restaurant, models.CASCADE) + + +class Wholesaler(Supplier): + retailer = models.ForeignKey( + Supplier, models.CASCADE, related_name="wholesale_supplier" + ) + + +class Parent(models.Model): + created = models.DateTimeField(default=datetime.datetime.now) + + +class Child(Parent): + name = models.CharField(max_length=10) + + +class SelfRefParent(models.Model): + parent_data = models.IntegerField() + self_data = models.ForeignKey("self", models.SET_NULL, null=True) + + +class SelfRefChild(SelfRefParent): + child_data = models.IntegerField() + + +class Article(models.Model): + headline = models.CharField(max_length=100) + pub_date = models.DateTimeField() + + class Meta: + ordering = ("-pub_date", "headline") + + +class ArticleWithAuthor(Article): + author = models.CharField(max_length=100) + + +class M2MBase(models.Model): + articles = models.ManyToManyField(Article) + + +class M2MChild(M2MBase): + name = models.CharField(max_length=50) + + +class Evaluation(Article): + quality = models.IntegerField() + + class Meta: + abstract = True + + +class QualityControl(Evaluation): + assignee = models.CharField(max_length=50) + + +class BaseM(models.Model): + base_name = models.CharField(max_length=100) + + 
+class DerivedM(BaseM): + customPK = models.IntegerField(primary_key=True) + derived_name = models.CharField(max_length=100) + + +class AuditBase(models.Model): + planned_date = models.DateField() + + class Meta: + abstract = True + verbose_name_plural = "Audits" + + +class CertificationAudit(AuditBase): + class Meta(AuditBase.Meta): + abstract = True + + +class InternalCertificationAudit(CertificationAudit): + auditing_dept = models.CharField(max_length=20) + + +# Abstract classes don't get m2m tables autocreated. +class Person(models.Model): + name = models.CharField(max_length=100) + + class Meta: + ordering = ("name",) + + +class AbstractEvent(models.Model): + name = models.CharField(max_length=100) + attendees = models.ManyToManyField(Person, related_name="%(class)s_set") + + class Meta: + abstract = True + ordering = ("name",) + + +class BirthdayParty(AbstractEvent): + pass + + +class BachelorParty(AbstractEvent): + pass + + +class MessyBachelorParty(BachelorParty): + pass + + +# Check concrete -> abstract -> concrete inheritance +class SearchableLocation(models.Model): + keywords = models.CharField(max_length=255) + + +class Station(SearchableLocation): + name = models.CharField(max_length=128) + + class Meta: + abstract = True + + +class BusStation(Station): + inbound = models.BooleanField(default=False) + + +class TrainStation(Station): + zone = models.IntegerField() + + +class User(models.Model): + username = models.CharField(max_length=30, unique=True) + + +class Profile(User): + profile_id = models.AutoField(primary_key=True) + extra = models.CharField(max_length=30, blank=True) + + +# Check concrete + concrete -> concrete -> concrete +class Politician(models.Model): + politician_id = models.AutoField(primary_key=True) + title = models.CharField(max_length=50) + + +class Congressman(Person, Politician): + state = models.CharField(max_length=2) + + +class Senator(Congressman): + pass diff --git 
a/testbed/django__django/tests/model_inheritance_regress/tests.py b/testbed/django__django/tests/model_inheritance_regress/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..1a9f953d6f1f320478aafee88d4f28a65ab2eeb5 --- /dev/null +++ b/testbed/django__django/tests/model_inheritance_regress/tests.py @@ -0,0 +1,676 @@ +""" +Regression tests for Model inheritance behavior. +""" +import datetime +from operator import attrgetter +from unittest import expectedFailure + +from django import forms +from django.test import TestCase + +from .models import ( + ArticleWithAuthor, + BachelorParty, + BirthdayParty, + BusStation, + Child, + Congressman, + DerivedM, + InternalCertificationAudit, + ItalianRestaurant, + M2MChild, + MessyBachelorParty, + ParkingLot, + ParkingLot3, + ParkingLot4A, + ParkingLot4B, + Person, + Place, + Politician, + Profile, + QualityControl, + Restaurant, + SelfRefChild, + SelfRefParent, + Senator, + Supplier, + TrainStation, + User, + Wholesaler, +) + + +class ModelInheritanceTest(TestCase): + def test_model_inheritance(self): + # Regression for #7350, #7202 + # When you create a Parent object with a specific reference to an + # existent child instance, saving the Parent doesn't duplicate the + # child. This behavior is only activated during a raw save - it is + # mostly relevant to deserialization, but any sort of CORBA style + # 'narrow()' API would require a similar approach. + + # Create a child-parent-grandparent chain + place1 = Place(name="Guido's House of Pasta", address="944 W. 
Fullerton") + place1.save_base(raw=True) + restaurant = Restaurant( + place_ptr=place1, + serves_hot_dogs=True, + serves_pizza=False, + ) + restaurant.save_base(raw=True) + italian_restaurant = ItalianRestaurant( + restaurant_ptr=restaurant, serves_gnocchi=True + ) + italian_restaurant.save_base(raw=True) + + # Create a child-parent chain with an explicit parent link + place2 = Place(name="Main St", address="111 Main St") + place2.save_base(raw=True) + park = ParkingLot(parent=place2, capacity=100) + park.save_base(raw=True) + + # No extra parent objects have been created. + places = list(Place.objects.all()) + self.assertEqual(places, [place1, place2]) + + dicts = list(Restaurant.objects.values("name", "serves_hot_dogs")) + self.assertEqual( + dicts, [{"name": "Guido's House of Pasta", "serves_hot_dogs": True}] + ) + + dicts = list( + ItalianRestaurant.objects.values( + "name", "serves_hot_dogs", "serves_gnocchi" + ) + ) + self.assertEqual( + dicts, + [ + { + "name": "Guido's House of Pasta", + "serves_gnocchi": True, + "serves_hot_dogs": True, + } + ], + ) + + dicts = list(ParkingLot.objects.values("name", "capacity")) + self.assertEqual( + dicts, + [ + { + "capacity": 100, + "name": "Main St", + } + ], + ) + + # You can also update objects when using a raw save. + place1.name = "Guido's All New House of Pasta" + place1.save_base(raw=True) + + restaurant.serves_hot_dogs = False + restaurant.save_base(raw=True) + + italian_restaurant.serves_gnocchi = False + italian_restaurant.save_base(raw=True) + + place2.name = "Derelict lot" + place2.save_base(raw=True) + + park.capacity = 50 + park.save_base(raw=True) + + # No extra parent objects after an update, either. 
+ places = list(Place.objects.all()) + self.assertEqual(places, [place2, place1]) + self.assertEqual(places[0].name, "Derelict lot") + self.assertEqual(places[1].name, "Guido's All New House of Pasta") + + dicts = list(Restaurant.objects.values("name", "serves_hot_dogs")) + self.assertEqual( + dicts, + [ + { + "name": "Guido's All New House of Pasta", + "serves_hot_dogs": False, + } + ], + ) + + dicts = list( + ItalianRestaurant.objects.values( + "name", "serves_hot_dogs", "serves_gnocchi" + ) + ) + self.assertEqual( + dicts, + [ + { + "name": "Guido's All New House of Pasta", + "serves_gnocchi": False, + "serves_hot_dogs": False, + } + ], + ) + + dicts = list(ParkingLot.objects.values("name", "capacity")) + self.assertEqual( + dicts, + [ + { + "capacity": 50, + "name": "Derelict lot", + } + ], + ) + + # If you try to raw_save a parent attribute onto a child object, + # the attribute will be ignored. + + italian_restaurant.name = "Lorenzo's Pasta Hut" + italian_restaurant.save_base(raw=True) + + # Note that the name has not changed + # - name is an attribute of Place, not ItalianRestaurant + dicts = list( + ItalianRestaurant.objects.values( + "name", "serves_hot_dogs", "serves_gnocchi" + ) + ) + self.assertEqual( + dicts, + [ + { + "name": "Guido's All New House of Pasta", + "serves_gnocchi": False, + "serves_hot_dogs": False, + } + ], + ) + + def test_issue_7105(self): + # Regressions tests for #7105: dates() queries should be able to use + # fields from the parent model as easily as the child. + Child.objects.create( + name="child", created=datetime.datetime(2008, 6, 26, 17, 0, 0) + ) + datetimes = list(Child.objects.datetimes("created", "month")) + self.assertEqual(datetimes, [datetime.datetime(2008, 6, 1, 0, 0)]) + + def test_issue_7276(self): + # Regression test for #7276: calling delete() on a model with + # multi-table inheritance should delete the associated rows from any + # ancestor tables, as well as any descendent objects. 
+ place1 = Place(name="Guido's House of Pasta", address="944 W. Fullerton") + place1.save_base(raw=True) + restaurant = Restaurant( + place_ptr=place1, + serves_hot_dogs=True, + serves_pizza=False, + ) + restaurant.save_base(raw=True) + italian_restaurant = ItalianRestaurant( + restaurant_ptr=restaurant, serves_gnocchi=True + ) + italian_restaurant.save_base(raw=True) + + ident = ItalianRestaurant.objects.all()[0].id + self.assertEqual(Place.objects.get(pk=ident), place1) + Restaurant.objects.create( + name="a", + address="xx", + serves_hot_dogs=True, + serves_pizza=False, + ) + + # This should delete both Restaurants, plus the related places, plus + # the ItalianRestaurant. + Restaurant.objects.all().delete() + + with self.assertRaises(Place.DoesNotExist): + Place.objects.get(pk=ident) + with self.assertRaises(ItalianRestaurant.DoesNotExist): + ItalianRestaurant.objects.get(pk=ident) + + def test_issue_6755(self): + """ + Regression test for #6755 + """ + r = Restaurant(serves_pizza=False, serves_hot_dogs=False) + r.save() + self.assertEqual(r.id, r.place_ptr_id) + orig_id = r.id + r = Restaurant(place_ptr_id=orig_id, serves_pizza=True, serves_hot_dogs=False) + r.save() + self.assertEqual(r.id, orig_id) + self.assertEqual(r.id, r.place_ptr_id) + + def test_issue_11764(self): + """ + Regression test for #11764 + """ + wholesalers = list(Wholesaler.objects.select_related()) + self.assertEqual(wholesalers, []) + + def test_issue_7853(self): + """ + Regression test for #7853 + If the parent class has a self-referential link, make sure that any + updates to that link via the child update the right table. 
+ """ + obj = SelfRefChild.objects.create(child_data=37, parent_data=42) + obj.delete() + + def test_get_next_previous_by_date(self): + """ + Regression tests for #8076 + get_(next/previous)_by_date should work + """ + c1 = ArticleWithAuthor( + headline="ArticleWithAuthor 1", + author="Person 1", + pub_date=datetime.datetime(2005, 8, 1, 3, 0), + ) + c1.save() + c2 = ArticleWithAuthor( + headline="ArticleWithAuthor 2", + author="Person 2", + pub_date=datetime.datetime(2005, 8, 1, 10, 0), + ) + c2.save() + c3 = ArticleWithAuthor( + headline="ArticleWithAuthor 3", + author="Person 3", + pub_date=datetime.datetime(2005, 8, 2), + ) + c3.save() + + self.assertEqual(c1.get_next_by_pub_date(), c2) + self.assertEqual(c2.get_next_by_pub_date(), c3) + with self.assertRaises(ArticleWithAuthor.DoesNotExist): + c3.get_next_by_pub_date() + self.assertEqual(c3.get_previous_by_pub_date(), c2) + self.assertEqual(c2.get_previous_by_pub_date(), c1) + with self.assertRaises(ArticleWithAuthor.DoesNotExist): + c1.get_previous_by_pub_date() + + def test_inherited_fields(self): + """ + Regression test for #8825 and #9390 + Make sure all inherited fields (esp. m2m fields, in this case) appear + on the child class. + """ + m2mchildren = list(M2MChild.objects.filter(articles__isnull=False)) + self.assertEqual(m2mchildren, []) + + # Ordering should not include any database column more than once (this + # is most likely to occur naturally with model inheritance, so we + # check it here). Regression test for #9390. This necessarily pokes at + # the SQL string for the query, since the duplicate problems are only + # apparent at that late stage. 
+ qs = ArticleWithAuthor.objects.order_by("pub_date", "pk") + sql = qs.query.get_compiler(qs.db).as_sql()[0] + fragment = sql[sql.find("ORDER BY") :] + pos = fragment.find("pub_date") + self.assertEqual(fragment.find("pub_date", pos + 1), -1) + + def test_queryset_update_on_parent_model(self): + """ + Regression test for #10362 + It is possible to call update() and only change a field in + an ancestor model. + """ + article = ArticleWithAuthor.objects.create( + author="fred", + headline="Hey there!", + pub_date=datetime.datetime(2009, 3, 1, 8, 0, 0), + ) + update = ArticleWithAuthor.objects.filter(author="fred").update( + headline="Oh, no!" + ) + self.assertEqual(update, 1) + update = ArticleWithAuthor.objects.filter(pk=article.pk).update( + headline="Oh, no!" + ) + self.assertEqual(update, 1) + + derivedm1 = DerivedM.objects.create( + customPK=44, + base_name="b1", + derived_name="d1", + ) + self.assertEqual(derivedm1.customPK, 44) + self.assertEqual(derivedm1.base_name, "b1") + self.assertEqual(derivedm1.derived_name, "d1") + derivedms = list(DerivedM.objects.all()) + self.assertEqual(derivedms, [derivedm1]) + + def test_use_explicit_o2o_to_parent_as_pk(self): + """ + The connector from child to parent need not be the pk on the child. + """ + self.assertEqual(ParkingLot3._meta.pk.name, "primary_key") + # the child->parent link + self.assertEqual(ParkingLot3._meta.get_ancestor_link(Place).name, "parent") + + def test_use_explicit_o2o_to_parent_from_abstract_model(self): + self.assertEqual(ParkingLot4A._meta.pk.name, "parent") + ParkingLot4A.objects.create( + name="Parking4A", + address="21 Jump Street", + ) + + self.assertEqual(ParkingLot4B._meta.pk.name, "parent") + ParkingLot4A.objects.create( + name="Parking4B", + address="21 Jump Street", + ) + + def test_all_fields_from_abstract_base_class(self): + """ + Regression tests for #7588 + """ + # All fields from an ABC, including those inherited non-abstractly + # should be available on child classes (#7588). 
Creating this instance + # should work without error. + QualityControl.objects.create( + headline="Problems in Django", + pub_date=datetime.datetime.now(), + quality=10, + assignee="adrian", + ) + + def test_abstract_base_class_m2m_relation_inheritance(self): + # many-to-many relations defined on an abstract base class are + # correctly inherited (and created) on the child class. + p1 = Person.objects.create(name="Alice") + p2 = Person.objects.create(name="Bob") + p3 = Person.objects.create(name="Carol") + p4 = Person.objects.create(name="Dave") + + birthday = BirthdayParty.objects.create(name="Birthday party for Alice") + birthday.attendees.set([p1, p3]) + + bachelor = BachelorParty.objects.create(name="Bachelor party for Bob") + bachelor.attendees.set([p2, p4]) + + parties = list(p1.birthdayparty_set.all()) + self.assertEqual(parties, [birthday]) + + parties = list(p1.bachelorparty_set.all()) + self.assertEqual(parties, []) + + parties = list(p2.bachelorparty_set.all()) + self.assertEqual(parties, [bachelor]) + + # A subclass of a subclass of an abstract model doesn't get its own + # accessor. + self.assertFalse(hasattr(p2, "messybachelorparty_set")) + + # ... but it does inherit the m2m from its parent + messy = MessyBachelorParty.objects.create(name="Bachelor party for Dave") + messy.attendees.set([p4]) + messy_parent = messy.bachelorparty_ptr + + parties = list(p4.bachelorparty_set.all()) + self.assertEqual(parties, [bachelor, messy_parent]) + + def test_abstract_verbose_name_plural_inheritance(self): + """ + verbose_name_plural correctly inherited from ABC if inheritance chain + includes an abstract model. + """ + # Regression test for #11369: verbose_name_plural should be inherited + # from an ABC even when there are one or more intermediate + # abstract models in the inheritance chain, for consistency with + # verbose_name. 
+ self.assertEqual(InternalCertificationAudit._meta.verbose_name_plural, "Audits") + + def test_inherited_nullable_exclude(self): + obj = SelfRefChild.objects.create(child_data=37, parent_data=42) + self.assertQuerySetEqual( + SelfRefParent.objects.exclude(self_data=72), [obj.pk], attrgetter("pk") + ) + self.assertQuerySetEqual( + SelfRefChild.objects.exclude(self_data=72), [obj.pk], attrgetter("pk") + ) + + def test_concrete_abstract_concrete_pk(self): + """ + Primary key set correctly with concrete->abstract->concrete inheritance. + """ + # Regression test for #13987: Primary key is incorrectly determined + # when more than one model has a concrete->abstract->concrete + # inheritance hierarchy. + self.assertEqual( + len( + [field for field in BusStation._meta.local_fields if field.primary_key] + ), + 1, + ) + self.assertEqual( + len( + [ + field + for field in TrainStation._meta.local_fields + if field.primary_key + ] + ), + 1, + ) + self.assertIs(BusStation._meta.pk.model, BusStation) + self.assertIs(TrainStation._meta.pk.model, TrainStation) + + def test_inherited_unique_field_with_form(self): + """ + A model which has different primary key for the parent model passes + unique field checking correctly (#17615). + """ + + class ProfileForm(forms.ModelForm): + class Meta: + model = Profile + fields = "__all__" + + User.objects.create(username="user_only") + p = Profile.objects.create(username="user_with_profile") + form = ProfileForm( + {"username": "user_with_profile", "extra": "hello"}, instance=p + ) + self.assertTrue(form.is_valid()) + + def test_inheritance_joins(self): + # Test for #17502 - check that filtering through two levels of + # inheritance chain doesn't generate extra joins. 
+ qs = ItalianRestaurant.objects.all() + self.assertEqual(str(qs.query).count("JOIN"), 2) + qs = ItalianRestaurant.objects.filter(name="foo") + self.assertEqual(str(qs.query).count("JOIN"), 2) + + @expectedFailure + def test_inheritance_values_joins(self): + # It would be nice (but not too important) to skip the middle join in + # this case. Skipping is possible as nothing from the middle model is + # used in the qs and top contains direct pointer to the bottom model. + qs = ItalianRestaurant.objects.values_list("serves_gnocchi").filter(name="foo") + self.assertEqual(str(qs.query).count("JOIN"), 1) + + def test_issue_21554(self): + senator = Senator.objects.create(name="John Doe", title="X", state="Y") + senator = Senator.objects.get(pk=senator.pk) + self.assertEqual(senator.name, "John Doe") + self.assertEqual(senator.title, "X") + self.assertEqual(senator.state, "Y") + + def test_inheritance_resolve_columns(self): + Restaurant.objects.create( + name="Bobs Cafe", + address="Somewhere", + serves_pizza=True, + serves_hot_dogs=True, + ) + p = Place.objects.select_related("restaurant")[0] + self.assertIsInstance(p.restaurant.serves_pizza, bool) + + def test_inheritance_select_related(self): + # Regression test for #7246 + r1 = Restaurant.objects.create( + name="Nobu", serves_hot_dogs=True, serves_pizza=False + ) + r2 = Restaurant.objects.create( + name="Craft", serves_hot_dogs=False, serves_pizza=True + ) + Supplier.objects.create(name="John", restaurant=r1) + Supplier.objects.create(name="Jane", restaurant=r2) + + self.assertQuerySetEqual( + Supplier.objects.order_by("name").select_related(), + [ + "Jane", + "John", + ], + attrgetter("name"), + ) + + jane = Supplier.objects.order_by("name").select_related("restaurant")[0] + self.assertEqual(jane.restaurant.name, "Craft") + + def test_filter_with_parent_fk(self): + r = Restaurant.objects.create() + s = Supplier.objects.create(restaurant=r) + # The mismatch between Restaurant and Place is intentional (#28175). 
+ self.assertSequenceEqual( + Supplier.objects.filter(restaurant__in=Place.objects.all()), [s] + ) + + def test_ptr_accessor_assigns_state(self): + r = Restaurant.objects.create() + self.assertIs(r.place_ptr._state.adding, False) + self.assertEqual(r.place_ptr._state.db, "default") + + def test_related_filtering_query_efficiency_ticket_15844(self): + r = Restaurant.objects.create( + name="Guido's House of Pasta", + address="944 W. Fullerton", + serves_hot_dogs=True, + serves_pizza=False, + ) + s = Supplier.objects.create(restaurant=r) + with self.assertNumQueries(1): + self.assertSequenceEqual(Supplier.objects.filter(restaurant=r), [s]) + with self.assertNumQueries(1): + self.assertSequenceEqual(r.supplier_set.all(), [s]) + + def test_queries_on_parent_access(self): + italian_restaurant = ItalianRestaurant.objects.create( + name="Guido's House of Pasta", + address="944 W. Fullerton", + serves_hot_dogs=True, + serves_pizza=False, + serves_gnocchi=True, + ) + + # No queries are made when accessing the parent objects. + italian_restaurant = ItalianRestaurant.objects.get(pk=italian_restaurant.pk) + with self.assertNumQueries(0): + restaurant = italian_restaurant.restaurant_ptr + self.assertEqual(restaurant.place_ptr.restaurant, restaurant) + self.assertEqual(restaurant.italianrestaurant, italian_restaurant) + + # One query is made when accessing the parent objects when the instance + # is deferred. + italian_restaurant = ItalianRestaurant.objects.only("serves_gnocchi").get( + pk=italian_restaurant.pk + ) + with self.assertNumQueries(1): + restaurant = italian_restaurant.restaurant_ptr + self.assertEqual(restaurant.place_ptr.restaurant, restaurant) + self.assertEqual(restaurant.italianrestaurant, italian_restaurant) + + # No queries are made when accessing the parent objects when the + # instance has deferred a field not present in the parent table. 
+ italian_restaurant = ItalianRestaurant.objects.defer("serves_gnocchi").get( + pk=italian_restaurant.pk + ) + with self.assertNumQueries(0): + restaurant = italian_restaurant.restaurant_ptr + self.assertEqual(restaurant.place_ptr.restaurant, restaurant) + self.assertEqual(restaurant.italianrestaurant, italian_restaurant) + + def test_id_field_update_on_ancestor_change(self): + place1 = Place.objects.create(name="House of Pasta", address="944 Fullerton") + place2 = Place.objects.create(name="House of Pizza", address="954 Fullerton") + place3 = Place.objects.create(name="Burger house", address="964 Fullerton") + restaurant1 = Restaurant.objects.create( + place_ptr=place1, + serves_hot_dogs=True, + serves_pizza=False, + ) + restaurant2 = Restaurant.objects.create( + place_ptr=place2, + serves_hot_dogs=True, + serves_pizza=False, + ) + + italian_restaurant = ItalianRestaurant.objects.create( + restaurant_ptr=restaurant1, + serves_gnocchi=True, + ) + # Changing the parent of a restaurant changes the restaurant's ID & PK. + restaurant1.place_ptr = place3 + self.assertEqual(restaurant1.pk, place3.pk) + self.assertEqual(restaurant1.id, place3.id) + self.assertEqual(restaurant1.pk, restaurant1.id) + restaurant1.place_ptr = None + self.assertIsNone(restaurant1.pk) + self.assertIsNone(restaurant1.id) + # Changing the parent of an italian restaurant changes the restaurant's + # ID & PK. + italian_restaurant.restaurant_ptr = restaurant2 + self.assertEqual(italian_restaurant.pk, restaurant2.pk) + self.assertEqual(italian_restaurant.id, restaurant2.id) + self.assertEqual(italian_restaurant.pk, italian_restaurant.id) + italian_restaurant.restaurant_ptr = None + self.assertIsNone(italian_restaurant.pk) + self.assertIsNone(italian_restaurant.id) + + def test_create_new_instance_with_pk_equals_none(self): + p1 = Profile.objects.create(username="john") + p2 = User.objects.get(pk=p1.user_ptr_id).profile + # Create a new profile by setting pk = None. 
+ p2.pk = None + p2.user_ptr_id = None + p2.username = "bill" + p2.save() + self.assertEqual(Profile.objects.count(), 2) + self.assertEqual(User.objects.get(pk=p1.user_ptr_id).username, "john") + + def test_create_new_instance_with_pk_equals_none_multi_inheritance(self): + c1 = Congressman.objects.create(state="PA", name="John", title="senator 1") + c2 = Person.objects.get(pk=c1.pk).congressman + # Create a new congressman by setting pk = None. + c2.pk = None + c2.id = None + c2.politician_ptr_id = None + c2.name = "Bill" + c2.title = "senator 2" + c2.save() + self.assertEqual(Congressman.objects.count(), 2) + self.assertEqual(Person.objects.get(pk=c1.pk).name, "John") + self.assertEqual( + Politician.objects.get(pk=c1.politician_ptr_id).title, + "senator 1", + ) + + def test_mti_update_parent_through_child(self): + Politician.objects.create() + Congressman.objects.create() + Congressman.objects.update(title="senator 1") + self.assertEqual(Congressman.objects.get().title, "senator 1") + + def test_mti_update_grand_parent_through_child(self): + Politician.objects.create() + Senator.objects.create() + Senator.objects.update(title="senator 1") + self.assertEqual(Senator.objects.get().title, "senator 1") diff --git a/testbed/django__django/tests/model_meta/__init__.py b/testbed/django__django/tests/model_meta/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_meta/models.py b/testbed/django__django/tests/model_meta/models.py new file mode 100644 index 0000000000000000000000000000000000000000..6da62be2ac2636245a9c1c08ea6add8df61a4c85 --- /dev/null +++ b/testbed/django__django/tests/model_meta/models.py @@ -0,0 +1,183 @@ +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation +from django.contrib.contenttypes.models import ContentType +from django.db import models + + +class Relation(models.Model): + pass + + +class 
InstanceOnlyDescriptor: + def __get__(self, instance, cls=None): + if instance is None: + raise AttributeError("Instance only") + return 1 + + +class AbstractPerson(models.Model): + # DATA fields + data_abstract = models.CharField(max_length=10) + fk_abstract = models.ForeignKey( + Relation, models.CASCADE, related_name="fk_abstract_rel" + ) + + # M2M fields + m2m_abstract = models.ManyToManyField(Relation, related_name="m2m_abstract_rel") + friends_abstract = models.ManyToManyField("self", symmetrical=True) + following_abstract = models.ManyToManyField( + "self", related_name="followers_abstract", symmetrical=False + ) + + # VIRTUAL fields + data_not_concrete_abstract = models.ForeignObject( + Relation, + on_delete=models.CASCADE, + from_fields=["abstract_non_concrete_id"], + to_fields=["id"], + related_name="fo_abstract_rel", + ) + + # GFK fields + content_type_abstract = models.ForeignKey( + ContentType, models.CASCADE, related_name="+" + ) + object_id_abstract = models.PositiveIntegerField() + content_object_abstract = GenericForeignKey( + "content_type_abstract", "object_id_abstract" + ) + + # GR fields + generic_relation_abstract = GenericRelation(Relation) + + class Meta: + abstract = True + + @property + def test_property(self): + return 1 + + test_instance_only_descriptor = InstanceOnlyDescriptor() + + +class BasePerson(AbstractPerson): + # DATA fields + data_base = models.CharField(max_length=10) + fk_base = models.ForeignKey(Relation, models.CASCADE, related_name="fk_base_rel") + + # M2M fields + m2m_base = models.ManyToManyField(Relation, related_name="m2m_base_rel") + friends_base = models.ManyToManyField("self", symmetrical=True) + following_base = models.ManyToManyField( + "self", related_name="followers_base", symmetrical=False + ) + + # VIRTUAL fields + data_not_concrete_base = models.ForeignObject( + Relation, + on_delete=models.CASCADE, + from_fields=["base_non_concrete_id"], + to_fields=["id"], + related_name="fo_base_rel", + ) + + # GFK fields 
+ content_type_base = models.ForeignKey(ContentType, models.CASCADE, related_name="+") + object_id_base = models.PositiveIntegerField() + content_object_base = GenericForeignKey("content_type_base", "object_id_base") + + # GR fields + generic_relation_base = GenericRelation(Relation) + + +class Person(BasePerson): + # DATA fields + data_inherited = models.CharField(max_length=10) + fk_inherited = models.ForeignKey( + Relation, models.CASCADE, related_name="fk_concrete_rel" + ) + + # M2M Fields + m2m_inherited = models.ManyToManyField(Relation, related_name="m2m_concrete_rel") + friends_inherited = models.ManyToManyField("self", symmetrical=True) + following_inherited = models.ManyToManyField( + "self", related_name="followers_concrete", symmetrical=False + ) + + # VIRTUAL fields + data_not_concrete_inherited = models.ForeignObject( + Relation, + on_delete=models.CASCADE, + from_fields=["model_non_concrete_id"], + to_fields=["id"], + related_name="fo_concrete_rel", + ) + + # GFK fields + content_type_concrete = models.ForeignKey( + ContentType, models.CASCADE, related_name="+" + ) + object_id_concrete = models.PositiveIntegerField() + content_object_concrete = GenericForeignKey( + "content_type_concrete", "object_id_concrete" + ) + + # GR fields + generic_relation_concrete = GenericRelation(Relation) + + +class ProxyPerson(Person): + class Meta: + proxy = True + + +class PersonThroughProxySubclass(ProxyPerson): + pass + + +class Relating(models.Model): + # ForeignKey to BasePerson + baseperson = models.ForeignKey( + BasePerson, models.CASCADE, related_name="relating_baseperson" + ) + baseperson_hidden = models.ForeignKey(BasePerson, models.CASCADE, related_name="+") + + # ForeignKey to Person + person = models.ForeignKey(Person, models.CASCADE, related_name="relating_person") + person_hidden = models.ForeignKey(Person, models.CASCADE, related_name="+") + + # ForeignKey to ProxyPerson + proxyperson = models.ForeignKey( + ProxyPerson, models.CASCADE, 
related_name="relating_proxyperson" + ) + proxyperson_hidden = models.ForeignKey( + ProxyPerson, models.CASCADE, related_name="relating_proxyperson_hidden+" + ) + + # ManyToManyField to BasePerson + basepeople = models.ManyToManyField(BasePerson, related_name="relating_basepeople") + basepeople_hidden = models.ManyToManyField(BasePerson, related_name="+") + + # ManyToManyField to Person + people = models.ManyToManyField(Person, related_name="relating_people") + people_hidden = models.ManyToManyField(Person, related_name="+") + + +# ParentListTests models +class CommonAncestor(models.Model): + pass + + +class FirstParent(CommonAncestor): + first_ancestor = models.OneToOneField( + CommonAncestor, models.CASCADE, primary_key=True, parent_link=True + ) + + +class SecondParent(CommonAncestor): + second_ancestor = models.OneToOneField( + CommonAncestor, models.CASCADE, primary_key=True, parent_link=True + ) + + +class Child(FirstParent, SecondParent): + pass diff --git a/testbed/django__django/tests/model_meta/results.py b/testbed/django__django/tests/model_meta/results.py new file mode 100644 index 0000000000000000000000000000000000000000..2b942ee8149528d95225386f35d01bfde40a617b --- /dev/null +++ b/testbed/django__django/tests/model_meta/results.py @@ -0,0 +1,895 @@ +from .models import AbstractPerson, BasePerson, Person, ProxyPerson, Relating, Relation + +TEST_RESULTS = { + "get_all_field_names": { + Person: [ + "baseperson_ptr", + "baseperson_ptr_id", + "content_type_abstract", + "content_type_abstract_id", + "content_type_base", + "content_type_base_id", + "content_type_concrete", + "content_type_concrete_id", + "data_abstract", + "data_base", + "data_inherited", + "data_not_concrete_abstract", + "data_not_concrete_base", + "data_not_concrete_inherited", + "fk_abstract", + "fk_abstract_id", + "fk_base", + "fk_base_id", + "fk_inherited", + "fk_inherited_id", + "followers_abstract", + "followers_base", + "followers_concrete", + "following_abstract", + 
"following_base", + "following_inherited", + "friends_abstract", + "friends_base", + "friends_inherited", + "generic_relation_abstract", + "generic_relation_base", + "generic_relation_concrete", + "id", + "m2m_abstract", + "m2m_base", + "m2m_inherited", + "object_id_abstract", + "object_id_base", + "object_id_concrete", + "relating_basepeople", + "relating_baseperson", + "relating_people", + "relating_person", + ], + BasePerson: [ + "content_type_abstract", + "content_type_abstract_id", + "content_type_base", + "content_type_base_id", + "data_abstract", + "data_base", + "data_not_concrete_abstract", + "data_not_concrete_base", + "fk_abstract", + "fk_abstract_id", + "fk_base", + "fk_base_id", + "followers_abstract", + "followers_base", + "following_abstract", + "following_base", + "friends_abstract", + "friends_base", + "generic_relation_abstract", + "generic_relation_base", + "id", + "m2m_abstract", + "m2m_base", + "object_id_abstract", + "object_id_base", + "person", + "relating_basepeople", + "relating_baseperson", + ], + AbstractPerson: [ + "content_type_abstract", + "content_type_abstract_id", + "data_abstract", + "data_not_concrete_abstract", + "fk_abstract", + "fk_abstract_id", + "following_abstract", + "friends_abstract", + "generic_relation_abstract", + "m2m_abstract", + "object_id_abstract", + ], + Relating: [ + "basepeople", + "basepeople_hidden", + "baseperson", + "baseperson_hidden", + "baseperson_hidden_id", + "baseperson_id", + "id", + "people", + "people_hidden", + "person", + "person_hidden", + "person_hidden_id", + "person_id", + "proxyperson", + "proxyperson_hidden", + "proxyperson_hidden_id", + "proxyperson_id", + ], + }, + "fields": { + Person: [ + "id", + "data_abstract", + "fk_abstract_id", + "data_not_concrete_abstract", + "content_type_abstract_id", + "object_id_abstract", + "data_base", + "fk_base_id", + "data_not_concrete_base", + "content_type_base_id", + "object_id_base", + "baseperson_ptr_id", + "data_inherited", + "fk_inherited_id", + 
"data_not_concrete_inherited", + "content_type_concrete_id", + "object_id_concrete", + ], + BasePerson: [ + "id", + "data_abstract", + "fk_abstract_id", + "data_not_concrete_abstract", + "content_type_abstract_id", + "object_id_abstract", + "data_base", + "fk_base_id", + "data_not_concrete_base", + "content_type_base_id", + "object_id_base", + ], + AbstractPerson: [ + "data_abstract", + "fk_abstract_id", + "data_not_concrete_abstract", + "content_type_abstract_id", + "object_id_abstract", + ], + Relating: [ + "id", + "baseperson_id", + "baseperson_hidden_id", + "person_id", + "person_hidden_id", + "proxyperson_id", + "proxyperson_hidden_id", + ], + }, + "local_fields": { + Person: [ + "baseperson_ptr_id", + "data_inherited", + "fk_inherited_id", + "data_not_concrete_inherited", + "content_type_concrete_id", + "object_id_concrete", + ], + BasePerson: [ + "id", + "data_abstract", + "fk_abstract_id", + "data_not_concrete_abstract", + "content_type_abstract_id", + "object_id_abstract", + "data_base", + "fk_base_id", + "data_not_concrete_base", + "content_type_base_id", + "object_id_base", + ], + AbstractPerson: [ + "data_abstract", + "fk_abstract_id", + "data_not_concrete_abstract", + "content_type_abstract_id", + "object_id_abstract", + ], + Relating: [ + "id", + "baseperson_id", + "baseperson_hidden_id", + "person_id", + "person_hidden_id", + "proxyperson_id", + "proxyperson_hidden_id", + ], + }, + "local_concrete_fields": { + Person: [ + "baseperson_ptr_id", + "data_inherited", + "fk_inherited_id", + "content_type_concrete_id", + "object_id_concrete", + ], + BasePerson: [ + "id", + "data_abstract", + "fk_abstract_id", + "content_type_abstract_id", + "object_id_abstract", + "data_base", + "fk_base_id", + "content_type_base_id", + "object_id_base", + ], + AbstractPerson: [ + "data_abstract", + "fk_abstract_id", + "content_type_abstract_id", + "object_id_abstract", + ], + Relating: [ + "id", + "baseperson_id", + "baseperson_hidden_id", + "person_id", + 
"person_hidden_id", + "proxyperson_id", + "proxyperson_hidden_id", + ], + }, + "many_to_many": { + Person: [ + "m2m_abstract", + "friends_abstract", + "following_abstract", + "m2m_base", + "friends_base", + "following_base", + "m2m_inherited", + "friends_inherited", + "following_inherited", + ], + BasePerson: [ + "m2m_abstract", + "friends_abstract", + "following_abstract", + "m2m_base", + "friends_base", + "following_base", + ], + AbstractPerson: [ + "m2m_abstract", + "friends_abstract", + "following_abstract", + ], + Relating: [ + "basepeople", + "basepeople_hidden", + "people", + "people_hidden", + ], + }, + "many_to_many_with_model": { + Person: [ + BasePerson, + BasePerson, + BasePerson, + BasePerson, + BasePerson, + BasePerson, + None, + None, + None, + ], + BasePerson: [ + None, + None, + None, + None, + None, + None, + ], + AbstractPerson: [ + None, + None, + None, + ], + Relating: [ + None, + None, + None, + None, + ], + }, + "get_all_related_objects_with_model_legacy": { + Person: ( + ("relating_baseperson", BasePerson), + ("relating_person", None), + ), + BasePerson: ( + ("person", None), + ("relating_baseperson", None), + ), + Relation: ( + ("fk_abstract_rel", None), + ("fo_abstract_rel", None), + ("fk_base_rel", None), + ("fo_base_rel", None), + ("fk_concrete_rel", None), + ("fo_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model_hidden_local": { + Person: ( + ("+", None), + ("_model_meta_relating_people_hidden_+", None), + ("Person_following_inherited+", None), + ("Person_following_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_m2m_inherited+", None), + ("Relating_people+", None), + ("Relating_people_hidden+", None), + ("followers_concrete", None), + ("friends_inherited_rel_+", None), + ("personthroughproxysubclass", None), + ("relating_people", None), + ("relating_person", None), + ("relating_proxyperson", None), + ("relating_proxyperson_hidden+", None), + ), + 
ProxyPerson: ( + ("+", Person), + ("_model_meta_relating_people_hidden_+", Person), + ("Person_following_inherited+", Person), + ("Person_following_inherited+", Person), + ("Person_friends_inherited+", Person), + ("Person_friends_inherited+", Person), + ("Person_m2m_inherited+", Person), + ("Relating_people+", Person), + ("Relating_people_hidden+", Person), + ("followers_concrete", Person), + ("friends_inherited_rel_+", Person), + ("personthroughproxysubclass", Person), + ("relating_people", Person), + ("relating_person", Person), + ("relating_proxyperson", Person), + ("relating_proxyperson_hidden+", Person), + ), + BasePerson: ( + ("+", None), + ("_model_meta_relating_basepeople_hidden_+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_base+", None), + ("BasePerson_following_base+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Relating_basepeople+", None), + ("Relating_basepeople_hidden+", None), + ("followers_abstract", None), + ("followers_base", None), + ("friends_abstract_rel_+", None), + ("friends_base_rel_+", None), + ("person", None), + ("relating_basepeople", None), + ("relating_baseperson", None), + ), + Relation: ( + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Person_m2m_inherited+", None), + ("fk_abstract_rel", None), + ("fk_base_rel", None), + ("fk_concrete_rel", None), + ("fo_abstract_rel", None), + ("fo_base_rel", None), + ("fo_concrete_rel", None), + ("m2m_abstract_rel", None), + ("m2m_base_rel", None), + ("m2m_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model_hidden": { + 
Person: ( + ("+", BasePerson), + ("+", None), + ("_model_meta_relating_basepeople_hidden_+", BasePerson), + ("_model_meta_relating_people_hidden_+", None), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + ("BasePerson_m2m_abstract+", BasePerson), + ("BasePerson_m2m_base+", BasePerson), + ("Person_following_inherited+", None), + ("Person_following_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_m2m_inherited+", None), + ("Relating_basepeople+", BasePerson), + ("Relating_basepeople_hidden+", BasePerson), + ("Relating_people+", None), + ("Relating_people_hidden+", None), + ("followers_abstract", BasePerson), + ("followers_base", BasePerson), + ("followers_concrete", None), + ("friends_abstract_rel_+", BasePerson), + ("friends_base_rel_+", BasePerson), + ("friends_inherited_rel_+", None), + ("personthroughproxysubclass", None), + ("relating_basepeople", BasePerson), + ("relating_baseperson", BasePerson), + ("relating_people", None), + ("relating_person", None), + ("relating_proxyperson", None), + ("relating_proxyperson_hidden+", None), + ), + ProxyPerson: ( + ("+", BasePerson), + ("+", Person), + ("_model_meta_relating_basepeople_hidden_+", BasePerson), + ("_model_meta_relating_people_hidden_+", Person), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + 
("BasePerson_m2m_abstract+", BasePerson), + ("BasePerson_m2m_base+", BasePerson), + ("Person_following_inherited+", Person), + ("Person_following_inherited+", Person), + ("Person_friends_inherited+", Person), + ("Person_friends_inherited+", Person), + ("Person_m2m_inherited+", Person), + ("Relating_basepeople+", BasePerson), + ("Relating_basepeople_hidden+", BasePerson), + ("Relating_people+", Person), + ("Relating_people_hidden+", Person), + ("followers_abstract", BasePerson), + ("followers_base", BasePerson), + ("followers_concrete", Person), + ("friends_abstract_rel_+", BasePerson), + ("friends_base_rel_+", BasePerson), + ("friends_inherited_rel_+", Person), + ("personthroughproxysubclass", Person), + ("relating_basepeople", BasePerson), + ("relating_baseperson", BasePerson), + ("relating_people", Person), + ("relating_person", Person), + ("relating_proxyperson", Person), + ("relating_proxyperson_hidden+", Person), + ), + BasePerson: ( + ("+", None), + ("_model_meta_relating_basepeople_hidden_+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_base+", None), + ("BasePerson_following_base+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Relating_basepeople+", None), + ("Relating_basepeople_hidden+", None), + ("followers_abstract", None), + ("followers_base", None), + ("friends_abstract_rel_+", None), + ("friends_base_rel_+", None), + ("person", None), + ("relating_basepeople", None), + ("relating_baseperson", None), + ), + Relation: ( + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Person_m2m_inherited+", 
None), + ("fk_abstract_rel", None), + ("fk_base_rel", None), + ("fk_concrete_rel", None), + ("fo_abstract_rel", None), + ("fo_base_rel", None), + ("fo_concrete_rel", None), + ("m2m_abstract_rel", None), + ("m2m_base_rel", None), + ("m2m_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model_local": { + Person: ( + ("followers_concrete", None), + ("personthroughproxysubclass", None), + ("relating_person", None), + ("relating_people", None), + ("relating_proxyperson", None), + ), + ProxyPerson: ( + ("followers_concrete", Person), + ("personthroughproxysubclass", Person), + ("relating_person", Person), + ("relating_people", Person), + ("relating_proxyperson", Person), + ), + BasePerson: ( + ("followers_abstract", None), + ("followers_base", None), + ("person", None), + ("relating_baseperson", None), + ("relating_basepeople", None), + ), + Relation: ( + ("fk_abstract_rel", None), + ("fo_abstract_rel", None), + ("fk_base_rel", None), + ("fo_base_rel", None), + ("m2m_abstract_rel", None), + ("m2m_base_rel", None), + ("fk_concrete_rel", None), + ("fo_concrete_rel", None), + ("m2m_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model": { + Person: ( + ("followers_abstract", BasePerson), + ("followers_base", BasePerson), + ("relating_baseperson", BasePerson), + ("relating_basepeople", BasePerson), + ("followers_concrete", None), + ("personthroughproxysubclass", None), + ("relating_person", None), + ("relating_people", None), + ("relating_proxyperson", None), + ), + ProxyPerson: ( + ("followers_abstract", BasePerson), + ("followers_base", BasePerson), + ("relating_baseperson", BasePerson), + ("relating_basepeople", BasePerson), + ("followers_concrete", Person), + ("personthroughproxysubclass", Person), + ("relating_person", Person), + ("relating_people", Person), + ("relating_proxyperson", Person), + ), + BasePerson: ( + ("followers_abstract", None), + ("followers_base", None), + ("person", None), + ("relating_baseperson", None), + 
("relating_basepeople", None), + ), + Relation: ( + ("fk_abstract_rel", None), + ("fo_abstract_rel", None), + ("fk_base_rel", None), + ("fo_base_rel", None), + ("m2m_abstract_rel", None), + ("m2m_base_rel", None), + ("fk_concrete_rel", None), + ("fo_concrete_rel", None), + ("m2m_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model_local_legacy": { + Person: (("relating_person", None),), + BasePerson: (("person", None), ("relating_baseperson", None)), + Relation: ( + ("fk_abstract_rel", None), + ("fo_abstract_rel", None), + ("fk_base_rel", None), + ("fo_base_rel", None), + ("fk_concrete_rel", None), + ("fo_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model_hidden_legacy": { + BasePerson: ( + ("+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_base+", None), + ("BasePerson_following_base+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Relating_basepeople+", None), + ("Relating_basepeople_hidden+", None), + ("person", None), + ("relating_baseperson", None), + ), + Person: ( + ("+", BasePerson), + ("+", None), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + ("BasePerson_m2m_abstract+", BasePerson), + ("BasePerson_m2m_base+", BasePerson), + ("Person_following_inherited+", None), + ("Person_following_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_m2m_inherited+", None), + 
("Relating_basepeople+", BasePerson), + ("Relating_basepeople_hidden+", BasePerson), + ("Relating_people+", None), + ("Relating_people_hidden+", None), + ("relating_baseperson", BasePerson), + ("relating_person", None), + ), + Relation: ( + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Person_m2m_inherited+", None), + ("fk_abstract_rel", None), + ("fk_base_rel", None), + ("fk_concrete_rel", None), + ("fo_abstract_rel", None), + ("fo_base_rel", None), + ("fo_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model_hidden_local_legacy": { + BasePerson: ( + ("+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_base+", None), + ("BasePerson_following_base+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Relating_basepeople+", None), + ("Relating_basepeople_hidden+", None), + ("person", None), + ("relating_baseperson", None), + ), + Person: ( + ("+", None), + ("Person_following_inherited+", None), + ("Person_following_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_m2m_inherited+", None), + ("Relating_people+", None), + ("Relating_people_hidden+", None), + ("relating_person", None), + ), + Relation: ( + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Person_m2m_inherited+", None), + ("fk_abstract_rel", None), + ("fk_base_rel", None), + ("fk_concrete_rel", None), + ("fo_abstract_rel", None), + ("fo_base_rel", None), + ("fo_concrete_rel", None), + 
), + }, + "get_all_related_objects_with_model_proxy_legacy": { + BasePerson: ( + ("person", None), + ("relating_baseperson", None), + ), + Person: ( + ("relating_baseperson", BasePerson), + ("relating_person", None), + ("relating_proxyperson", None), + ), + Relation: ( + ("fk_abstract_rel", None), + ("fo_abstract_rel", None), + ("fk_base_rel", None), + ("fo_base_rel", None), + ("fk_concrete_rel", None), + ("fo_concrete_rel", None), + ), + }, + "get_all_related_objects_with_model_proxy_hidden_legacy": { + BasePerson: ( + ("+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_abstract+", None), + ("BasePerson_following_base+", None), + ("BasePerson_following_base+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_abstract+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_friends_base+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Relating_basepeople+", None), + ("Relating_basepeople_hidden+", None), + ("person", None), + ("relating_baseperson", None), + ), + Person: ( + ("+", BasePerson), + ("+", None), + ("+", None), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_abstract+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_following_base+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_abstract+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + ("BasePerson_friends_base+", BasePerson), + ("BasePerson_m2m_abstract+", BasePerson), + ("BasePerson_m2m_base+", BasePerson), + ("Person_following_inherited+", None), + ("Person_following_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_friends_inherited+", None), + ("Person_m2m_inherited+", None), + ("Relating_basepeople+", BasePerson), + ("Relating_basepeople_hidden+", BasePerson), + ("Relating_people+", None), + ("Relating_people_hidden+", None), + ("relating_baseperson", BasePerson), + 
("relating_person", None), + ("relating_proxyperson", None), + ), + Relation: ( + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("+", None), + ("BasePerson_m2m_abstract+", None), + ("BasePerson_m2m_base+", None), + ("Person_m2m_inherited+", None), + ("fk_abstract_rel", None), + ("fk_base_rel", None), + ("fk_concrete_rel", None), + ("fo_abstract_rel", None), + ("fo_base_rel", None), + ("fo_concrete_rel", None), + ), + }, + "get_all_related_many_to_many_with_model_legacy": { + BasePerson: ( + ("friends_abstract_rel_+", None), + ("followers_abstract", None), + ("friends_base_rel_+", None), + ("followers_base", None), + ("relating_basepeople", None), + ("_model_meta_relating_basepeople_hidden_+", None), + ), + Person: ( + ("friends_abstract_rel_+", BasePerson), + ("followers_abstract", BasePerson), + ("friends_base_rel_+", BasePerson), + ("followers_base", BasePerson), + ("relating_basepeople", BasePerson), + ("_model_meta_relating_basepeople_hidden_+", BasePerson), + ("friends_inherited_rel_+", None), + ("followers_concrete", None), + ("relating_people", None), + ("_relating_people_hidden_+", None), + ), + Relation: ( + ("m2m_abstract_rel", None), + ("m2m_base_rel", None), + ("m2m_concrete_rel", None), + ), + }, + "get_all_related_many_to_many_local_legacy": { + BasePerson: [ + "friends_abstract_rel_+", + "followers_abstract", + "friends_base_rel_+", + "followers_base", + "relating_basepeople", + "_model_meta_relating_basepeople_hidden_+", + ], + Person: [ + "friends_inherited_rel_+", + "followers_concrete", + "relating_people", + "_relating_people_hidden_+", + ], + Relation: [ + "m2m_abstract_rel", + "m2m_base_rel", + "m2m_concrete_rel", + ], + }, + "private_fields": { + AbstractPerson: [ + "generic_relation_abstract", + "content_object_abstract", + ], + BasePerson: [ + "generic_relation_base", + "content_object_base", + "generic_relation_abstract", + "content_object_abstract", + ], + Person: [ + 
"content_object_concrete", + "generic_relation_concrete", + "generic_relation_base", + "content_object_base", + "generic_relation_abstract", + "content_object_abstract", + ], + }, + "labels": { + AbstractPerson: "model_meta.AbstractPerson", + BasePerson: "model_meta.BasePerson", + Person: "model_meta.Person", + Relating: "model_meta.Relating", + }, + "lower_labels": { + AbstractPerson: "model_meta.abstractperson", + BasePerson: "model_meta.baseperson", + Person: "model_meta.person", + Relating: "model_meta.relating", + }, +} diff --git a/testbed/django__django/tests/model_meta/tests.py b/testbed/django__django/tests/model_meta/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..fe2f6e63da8dc2cdedfd28437dd0d16dcfaa7fc6 --- /dev/null +++ b/testbed/django__django/tests/model_meta/tests.py @@ -0,0 +1,347 @@ +from django.apps import apps +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation +from django.core.exceptions import FieldDoesNotExist +from django.db.models import CharField, Field, ForeignObjectRel, ManyToManyField +from django.db.models.options import EMPTY_RELATION_TREE, IMMUTABLE_WARNING +from django.test import SimpleTestCase + +from .models import ( + AbstractPerson, + BasePerson, + Child, + CommonAncestor, + FirstParent, + Person, + ProxyPerson, + Relating, + Relation, + SecondParent, +) +from .results import TEST_RESULTS + + +class OptionsBaseTests(SimpleTestCase): + def _map_related_query_names(self, res): + return tuple((o.name, m) for o, m in res) + + def _map_names(self, res): + return tuple((f.name, m) for f, m in res) + + def _model(self, current_model, field): + model = field.model._meta.concrete_model + return None if model == current_model else model + + def _details(self, current_model, relation): + direct = isinstance(relation, (Field, GenericForeignKey)) + model = relation.model._meta.concrete_model + if model == current_model: + model = None + + field = relation if direct else 
relation.field + return ( + relation, + model, + direct, + bool(field.many_to_many), + ) # many_to_many can be None + + +class GetFieldsTests(OptionsBaseTests): + def test_get_fields_is_immutable(self): + msg = IMMUTABLE_WARNING % "get_fields()" + for _ in range(2): + # Running unit test twice to ensure both non-cached and cached result + # are immutable. + fields = Person._meta.get_fields() + with self.assertRaisesMessage(AttributeError, msg): + fields += ["errors"] + + +class LabelTests(OptionsBaseTests): + def test_label(self): + for model, expected_result in TEST_RESULTS["labels"].items(): + self.assertEqual(model._meta.label, expected_result) + + def test_label_lower(self): + for model, expected_result in TEST_RESULTS["lower_labels"].items(): + self.assertEqual(model._meta.label_lower, expected_result) + + +class DataTests(OptionsBaseTests): + def test_fields(self): + for model, expected_result in TEST_RESULTS["fields"].items(): + fields = model._meta.fields + self.assertEqual([f.attname for f in fields], expected_result) + + def test_local_fields(self): + def is_data_field(f): + return isinstance(f, Field) and not f.many_to_many + + for model, expected_result in TEST_RESULTS["local_fields"].items(): + fields = model._meta.local_fields + self.assertEqual([f.attname for f in fields], expected_result) + for f in fields: + self.assertEqual(f.model, model) + self.assertTrue(is_data_field(f)) + + def test_local_concrete_fields(self): + for model, expected_result in TEST_RESULTS["local_concrete_fields"].items(): + fields = model._meta.local_concrete_fields + self.assertEqual([f.attname for f in fields], expected_result) + for f in fields: + self.assertIsNotNone(f.column) + + +class M2MTests(OptionsBaseTests): + def test_many_to_many(self): + for model, expected_result in TEST_RESULTS["many_to_many"].items(): + fields = model._meta.many_to_many + self.assertEqual([f.attname for f in fields], expected_result) + for f in fields: + self.assertTrue(f.many_to_many and 
f.is_relation) + + def test_many_to_many_with_model(self): + for model, expected_result in TEST_RESULTS["many_to_many_with_model"].items(): + models = [self._model(model, field) for field in model._meta.many_to_many] + self.assertEqual(models, expected_result) + + +class RelatedObjectsTests(OptionsBaseTests): + def key_name(self, r): + return r[0] + + def test_related_objects(self): + result_key = "get_all_related_objects_with_model" + for model, expected in TEST_RESULTS[result_key].items(): + objects = [ + (field, self._model(model, field)) + for field in model._meta.get_fields() + if field.auto_created and not field.concrete + ] + self.assertEqual( + sorted(self._map_related_query_names(objects), key=self.key_name), + sorted(expected, key=self.key_name), + ) + + def test_related_objects_local(self): + result_key = "get_all_related_objects_with_model_local" + for model, expected in TEST_RESULTS[result_key].items(): + objects = [ + (field, self._model(model, field)) + for field in model._meta.get_fields(include_parents=False) + if field.auto_created and not field.concrete + ] + self.assertEqual( + sorted(self._map_related_query_names(objects), key=self.key_name), + sorted(expected, key=self.key_name), + ) + + def test_related_objects_include_hidden(self): + result_key = "get_all_related_objects_with_model_hidden" + for model, expected in TEST_RESULTS[result_key].items(): + objects = [ + (field, self._model(model, field)) + for field in model._meta.get_fields(include_hidden=True) + if field.auto_created and not field.concrete + ] + self.assertEqual( + sorted(self._map_names(objects), key=self.key_name), + sorted(expected, key=self.key_name), + ) + + def test_related_objects_include_hidden_local_only(self): + result_key = "get_all_related_objects_with_model_hidden_local" + for model, expected in TEST_RESULTS[result_key].items(): + objects = [ + (field, self._model(model, field)) + for field in model._meta.get_fields( + include_hidden=True, include_parents=False + ) + 
if field.auto_created and not field.concrete + ] + self.assertEqual( + sorted(self._map_names(objects), key=self.key_name), + sorted(expected, key=self.key_name), + ) + + +class PrivateFieldsTests(OptionsBaseTests): + def test_private_fields(self): + for model, expected_names in TEST_RESULTS["private_fields"].items(): + objects = model._meta.private_fields + self.assertEqual(sorted(f.name for f in objects), sorted(expected_names)) + + +class GetFieldByNameTests(OptionsBaseTests): + def test_get_data_field(self): + field_info = self._details(Person, Person._meta.get_field("data_abstract")) + self.assertEqual(field_info[1:], (BasePerson, True, False)) + self.assertIsInstance(field_info[0], CharField) + + def test_get_m2m_field(self): + field_info = self._details(Person, Person._meta.get_field("m2m_base")) + self.assertEqual(field_info[1:], (BasePerson, True, True)) + self.assertIsInstance(field_info[0], ManyToManyField) + + def test_get_related_object(self): + field_info = self._details( + Person, Person._meta.get_field("relating_baseperson") + ) + self.assertEqual(field_info[1:], (BasePerson, False, False)) + self.assertIsInstance(field_info[0], ForeignObjectRel) + + def test_get_related_m2m(self): + field_info = self._details(Person, Person._meta.get_field("relating_people")) + self.assertEqual(field_info[1:], (None, False, True)) + self.assertIsInstance(field_info[0], ForeignObjectRel) + + def test_get_generic_relation(self): + field_info = self._details( + Person, Person._meta.get_field("generic_relation_base") + ) + self.assertEqual(field_info[1:], (None, True, False)) + self.assertIsInstance(field_info[0], GenericRelation) + + def test_get_fields_only_searches_forward_on_apps_not_ready(self): + opts = Person._meta + # If apps registry is not ready, get_field() searches over only + # forward fields. 
+ opts.apps.models_ready = False + try: + # 'data_abstract' is a forward field, and therefore will be found + self.assertTrue(opts.get_field("data_abstract")) + msg = ( + "Person has no field named 'relating_baseperson'. The app " + "cache isn't ready yet, so if this is an auto-created related " + "field, it won't be available yet." + ) + # 'data_abstract' is a reverse field, and will raise an exception + with self.assertRaisesMessage(FieldDoesNotExist, msg): + opts.get_field("relating_baseperson") + finally: + opts.apps.models_ready = True + + +class RelationTreeTests(SimpleTestCase): + all_models = (Relation, AbstractPerson, BasePerson, Person, ProxyPerson, Relating) + + def setUp(self): + apps.clear_cache() + + def test_clear_cache_clears_relation_tree(self): + # The apps.clear_cache is setUp() should have deleted all trees. + # Exclude abstract models that are not included in the Apps registry + # and have no cache. + all_models_with_cache = (m for m in self.all_models if not m._meta.abstract) + for m in all_models_with_cache: + self.assertNotIn("_relation_tree", m._meta.__dict__) + + def test_first_relation_tree_access_populates_all(self): + # On first access, relation tree should have populated cache. + self.assertTrue(self.all_models[0]._meta._relation_tree) + + # AbstractPerson does not have any relations, so relation_tree + # should just return an EMPTY_RELATION_TREE. + self.assertEqual(AbstractPerson._meta._relation_tree, EMPTY_RELATION_TREE) + + # All the other models should already have their relation tree + # in the internal __dict__ . 
+ all_models_but_abstractperson = ( + m for m in self.all_models if m is not AbstractPerson + ) + for m in all_models_but_abstractperson: + self.assertIn("_relation_tree", m._meta.__dict__) + + def test_relations_related_objects(self): + # Testing non hidden related objects + self.assertEqual( + sorted( + field.related_query_name() + for field in Relation._meta._relation_tree + if not field.remote_field.field.remote_field.is_hidden() + ), + sorted( + [ + "fk_abstract_rel", + "fk_base_rel", + "fk_concrete_rel", + "fo_abstract_rel", + "fo_base_rel", + "fo_concrete_rel", + "m2m_abstract_rel", + "m2m_base_rel", + "m2m_concrete_rel", + ] + ), + ) + # Testing hidden related objects + self.assertEqual( + sorted( + field.related_query_name() for field in BasePerson._meta._relation_tree + ), + sorted( + [ + "+", + "_model_meta_relating_basepeople_hidden_+", + "BasePerson_following_abstract+", + "BasePerson_following_abstract+", + "BasePerson_following_base+", + "BasePerson_following_base+", + "BasePerson_friends_abstract+", + "BasePerson_friends_abstract+", + "BasePerson_friends_base+", + "BasePerson_friends_base+", + "BasePerson_m2m_abstract+", + "BasePerson_m2m_base+", + "Relating_basepeople+", + "Relating_basepeople_hidden+", + "followers_abstract", + "followers_base", + "friends_abstract_rel_+", + "friends_base_rel_+", + "person", + "relating_basepeople", + "relating_baseperson", + ] + ), + ) + self.assertEqual( + [ + field.related_query_name() + for field in AbstractPerson._meta._relation_tree + ], + [], + ) + + +class ParentListTests(SimpleTestCase): + def test_get_parent_list(self): + self.assertEqual(CommonAncestor._meta.get_parent_list(), []) + self.assertEqual(FirstParent._meta.get_parent_list(), [CommonAncestor]) + self.assertEqual(SecondParent._meta.get_parent_list(), [CommonAncestor]) + self.assertEqual( + Child._meta.get_parent_list(), [FirstParent, SecondParent, CommonAncestor] + ) + + +class PropertyNamesTests(SimpleTestCase): + def test_person(self): + # 
Instance only descriptors don't appear in _property_names. + self.assertEqual(BasePerson().test_instance_only_descriptor, 1) + with self.assertRaisesMessage(AttributeError, "Instance only"): + AbstractPerson.test_instance_only_descriptor + self.assertEqual( + AbstractPerson._meta._property_names, frozenset(["pk", "test_property"]) + ) + + +class ReturningFieldsTests(SimpleTestCase): + def test_pk(self): + self.assertEqual(Relation._meta.db_returning_fields, [Relation._meta.pk]) + + +class AbstractModelTests(SimpleTestCase): + def test_abstract_model_not_instantiated(self): + msg = "Abstract models cannot be instantiated." + with self.assertRaisesMessage(TypeError, msg): + AbstractPerson() diff --git a/testbed/django__django/tests/model_options/__init__.py b/testbed/django__django/tests/model_options/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_options/apps.py b/testbed/django__django/tests/model_options/apps.py new file mode 100644 index 0000000000000000000000000000000000000000..fdb9c6482179e8797e62e428f9504e4ee744ea16 --- /dev/null +++ b/testbed/django__django/tests/model_options/apps.py @@ -0,0 +1,25 @@ +from django.apps import AppConfig + + +class ModelDefaultPKConfig(AppConfig): + name = "model_options" + + +class ModelPKConfig(AppConfig): + name = "model_options" + default_auto_field = "django.db.models.SmallAutoField" + + +class ModelPKNonAutoConfig(AppConfig): + name = "model_options" + default_auto_field = "django.db.models.TextField" + + +class ModelPKNoneConfig(AppConfig): + name = "model_options" + default_auto_field = None + + +class ModelPKNonexistentConfig(AppConfig): + name = "model_options" + default_auto_field = "django.db.models.NonexistentAutoField" diff --git a/testbed/django__django/tests/model_options/models/__init__.py b/testbed/django__django/tests/model_options/models/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_options/models/default_related_name.py b/testbed/django__django/tests/model_options/models/default_related_name.py new file mode 100644 index 0000000000000000000000000000000000000000..d4687dc6a206a095b26ddd412ec6ae835ee9f070 --- /dev/null +++ b/testbed/django__django/tests/model_options/models/default_related_name.py @@ -0,0 +1,41 @@ +from django.db import models + + +class Author(models.Model): + first_name = models.CharField(max_length=128) + last_name = models.CharField(max_length=128) + + +class Editor(models.Model): + name = models.CharField(max_length=128) + bestselling_author = models.ForeignKey(Author, models.CASCADE) + + +class Book(models.Model): + title = models.CharField(max_length=128) + authors = models.ManyToManyField(Author) + editor = models.ForeignKey(Editor, models.CASCADE, related_name="edited_books") + + class Meta: + default_related_name = "books" + + +class Store(models.Model): + name = models.CharField(max_length=128) + address = models.CharField(max_length=128) + + class Meta: + abstract = True + default_related_name = "%(app_label)s_%(model_name)ss" + + +class BookStore(Store): + available_books = models.ManyToManyField(Book) + + +class EditorStore(Store): + editor = models.ForeignKey(Editor, models.CASCADE) + available_books = models.ManyToManyField(Book) + + class Meta: + default_related_name = "editor_stores" diff --git a/testbed/django__django/tests/model_options/models/tablespaces.py b/testbed/django__django/tests/model_options/models/tablespaces.py new file mode 100644 index 0000000000000000000000000000000000000000..19bfd3189037bac9fbe036f07a58295e1ebc1ab8 --- /dev/null +++ b/testbed/django__django/tests/model_options/models/tablespaces.py @@ -0,0 +1,53 @@ +from django.db import models + +# Since the test database doesn't have tablespaces, it's impossible for Django +# to create the tables for models 
where db_tablespace is set. To avoid this +# problem, we mark the models as unmanaged, and temporarily revert them to +# managed during each test. We also set them to use the same tables as the +# "reference" models to avoid errors when other tests run 'migrate' +# (proxy_models_inheritance does). + + +class ScientistRef(models.Model): + name = models.CharField(max_length=50) + + +class ArticleRef(models.Model): + title = models.CharField(max_length=50, unique=True) + code = models.CharField(max_length=50, unique=True) + authors = models.ManyToManyField(ScientistRef, related_name="articles_written_set") + reviewers = models.ManyToManyField( + ScientistRef, related_name="articles_reviewed_set" + ) + + +class Scientist(models.Model): + name = models.CharField(max_length=50) + + class Meta: + db_table = "model_options_scientistref" + db_tablespace = "tbl_tbsp" + managed = False + + +class Article(models.Model): + title = models.CharField(max_length=50, unique=True) + code = models.CharField(max_length=50, unique=True, db_tablespace="idx_tbsp") + authors = models.ManyToManyField(Scientist, related_name="articles_written_set") + reviewers = models.ManyToManyField( + Scientist, related_name="articles_reviewed_set", db_tablespace="idx_tbsp" + ) + + class Meta: + db_table = "model_options_articleref" + db_tablespace = "tbl_tbsp" + managed = False + + +# Also set the tables for automatically created models + +Authors = Article._meta.get_field("authors").remote_field.through +Authors._meta.db_table = "model_options_articleref_authors" + +Reviewers = Article._meta.get_field("reviewers").remote_field.through +Reviewers._meta.db_table = "model_options_articleref_reviewers" diff --git a/testbed/django__django/tests/model_options/test_default_pk.py b/testbed/django__django/tests/model_options/test_default_pk.py new file mode 100644 index 0000000000000000000000000000000000000000..896eddd828372ca0bd525999641acfe903e3963a --- /dev/null +++ 
b/testbed/django__django/tests/model_options/test_default_pk.py @@ -0,0 +1,120 @@ +from django.core.exceptions import ImproperlyConfigured +from django.db import models +from django.test import SimpleTestCase, override_settings +from django.test.utils import isolate_apps + + +class MyBigAutoField(models.BigAutoField): + pass + + +@isolate_apps("model_options") +class TestDefaultPK(SimpleTestCase): + @override_settings(DEFAULT_AUTO_FIELD="django.db.models.NonexistentAutoField") + def test_default_auto_field_setting_nonexistent(self): + msg = ( + "DEFAULT_AUTO_FIELD refers to the module " + "'django.db.models.NonexistentAutoField' that could not be " + "imported." + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + + class Model(models.Model): + pass + + @isolate_apps("model_options.apps.ModelPKNonexistentConfig") + def test_app_default_auto_field_nonexistent(self): + msg = ( + "model_options.apps.ModelPKNonexistentConfig.default_auto_field " + "refers to the module 'django.db.models.NonexistentAutoField' " + "that could not be imported." + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + + class Model(models.Model): + pass + + @override_settings(DEFAULT_AUTO_FIELD="django.db.models.TextField") + def test_default_auto_field_setting_non_auto(self): + msg = ( + "Primary key 'django.db.models.TextField' referred by " + "DEFAULT_AUTO_FIELD must subclass AutoField." + ) + with self.assertRaisesMessage(ValueError, msg): + + class Model(models.Model): + pass + + @isolate_apps("model_options.apps.ModelPKNonAutoConfig") + def test_app_default_auto_field_non_auto(self): + msg = ( + "Primary key 'django.db.models.TextField' referred by " + "model_options.apps.ModelPKNonAutoConfig.default_auto_field must " + "subclass AutoField." 
+ ) + with self.assertRaisesMessage(ValueError, msg): + + class Model(models.Model): + pass + + @override_settings(DEFAULT_AUTO_FIELD=None) + def test_default_auto_field_setting_none(self): + msg = "DEFAULT_AUTO_FIELD must not be empty." + with self.assertRaisesMessage(ImproperlyConfigured, msg): + + class Model(models.Model): + pass + + @isolate_apps("model_options.apps.ModelPKNoneConfig") + def test_app_default_auto_field_none(self): + msg = ( + "model_options.apps.ModelPKNoneConfig.default_auto_field must not " + "be empty." + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + + class Model(models.Model): + pass + + @isolate_apps("model_options.apps.ModelDefaultPKConfig") + @override_settings(DEFAULT_AUTO_FIELD="django.db.models.SmallAutoField") + def test_default_auto_field_setting(self): + class Model(models.Model): + pass + + self.assertIsInstance(Model._meta.pk, models.SmallAutoField) + + @override_settings( + DEFAULT_AUTO_FIELD="model_options.test_default_pk.MyBigAutoField" + ) + def test_default_auto_field_setting_bigautofield_subclass(self): + class Model(models.Model): + pass + + self.assertIsInstance(Model._meta.pk, MyBigAutoField) + + @isolate_apps("model_options.apps.ModelPKConfig") + @override_settings(DEFAULT_AUTO_FIELD="django.db.models.AutoField") + def test_app_default_auto_field(self): + class Model(models.Model): + pass + + self.assertIsInstance(Model._meta.pk, models.SmallAutoField) + + @isolate_apps("model_options.apps.ModelDefaultPKConfig") + @override_settings(DEFAULT_AUTO_FIELD="django.db.models.SmallAutoField") + def test_m2m_default_auto_field_setting(self): + class M2MModel(models.Model): + m2m = models.ManyToManyField("self") + + m2m_pk = M2MModel._meta.get_field("m2m").remote_field.through._meta.pk + self.assertIsInstance(m2m_pk, models.SmallAutoField) + + @isolate_apps("model_options.apps.ModelPKConfig") + @override_settings(DEFAULT_AUTO_FIELD="django.db.models.AutoField") + def test_m2m_app_default_auto_field(self): + 
class M2MModel(models.Model): + m2m = models.ManyToManyField("self") + + m2m_pk = M2MModel._meta.get_field("m2m").remote_field.through._meta.pk + self.assertIsInstance(m2m_pk, models.SmallAutoField) diff --git a/testbed/django__django/tests/model_options/test_default_related_name.py b/testbed/django__django/tests/model_options/test_default_related_name.py new file mode 100644 index 0000000000000000000000000000000000000000..1a59899bb0364052bf0585680b324a7487165b1a --- /dev/null +++ b/testbed/django__django/tests/model_options/test_default_related_name.py @@ -0,0 +1,39 @@ +from django.core.exceptions import FieldError +from django.test import TestCase + +from .models.default_related_name import Author, Book, Editor + + +class DefaultRelatedNameTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.author = Author.objects.create(first_name="Dave", last_name="Loper") + cls.editor = Editor.objects.create( + name="Test Editions", bestselling_author=cls.author + ) + cls.book = Book.objects.create(title="Test Book", editor=cls.editor) + cls.book.authors.add(cls.author) + + def test_no_default_related_name(self): + self.assertEqual(list(self.author.editor_set.all()), [self.editor]) + + def test_default_related_name(self): + self.assertEqual(list(self.author.books.all()), [self.book]) + + def test_default_related_name_in_queryset_lookup(self): + self.assertEqual(Author.objects.get(books=self.book), self.author) + + def test_model_name_not_available_in_queryset_lookup(self): + msg = "Cannot resolve keyword 'book' into field." + with self.assertRaisesMessage(FieldError, msg): + Author.objects.get(book=self.book) + + def test_related_name_overrides_default_related_name(self): + self.assertEqual(list(self.editor.edited_books.all()), [self.book]) + + def test_inheritance(self): + # model_options is the name of the application for this test. 
+ self.assertEqual(list(self.book.model_options_bookstores.all()), []) + + def test_inheritance_with_overridden_default_related_name(self): + self.assertEqual(list(self.book.editor_stores.all()), []) diff --git a/testbed/django__django/tests/model_options/test_tablespaces.py b/testbed/django__django/tests/model_options/test_tablespaces.py new file mode 100644 index 0000000000000000000000000000000000000000..0aa2e0fccfcede90e9de671400b1cbba2790f22d --- /dev/null +++ b/testbed/django__django/tests/model_options/test_tablespaces.py @@ -0,0 +1,135 @@ +from django.apps import apps +from django.conf import settings +from django.db import connection +from django.test import TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature + +from .models.tablespaces import ( + Article, + ArticleRef, + Authors, + Reviewers, + Scientist, + ScientistRef, +) + + +def sql_for_table(model): + with connection.schema_editor(collect_sql=True) as editor: + editor.create_model(model) + return editor.collected_sql[0] + + +def sql_for_index(model): + return "\n".join( + str(sql) for sql in connection.schema_editor()._model_indexes_sql(model) + ) + + +# We can't test the DEFAULT_TABLESPACE and DEFAULT_INDEX_TABLESPACE settings +# because they're evaluated when the model class is defined. As a consequence, +# @override_settings doesn't work, and the tests depend +class TablespacesTests(TransactionTestCase): + available_apps = ["model_options"] + + def setUp(self): + # The unmanaged models need to be removed after the test in order to + # prevent bad interactions with the flush operation in other tests. 
+ self._old_models = apps.app_configs["model_options"].models.copy() + + for model in Article, Authors, Reviewers, Scientist: + model._meta.managed = True + + def tearDown(self): + for model in Article, Authors, Reviewers, Scientist: + model._meta.managed = False + + apps.app_configs["model_options"].models = self._old_models + apps.all_models["model_options"] = self._old_models + apps.clear_cache() + + def assertNumContains(self, haystack, needle, count): + real_count = haystack.count(needle) + self.assertEqual( + real_count, + count, + "Found %d instances of '%s', expected %d" % (real_count, needle, count), + ) + + @skipUnlessDBFeature("supports_tablespaces") + def test_tablespace_for_model(self): + sql = sql_for_table(Scientist).lower() + if settings.DEFAULT_INDEX_TABLESPACE: + # 1 for the table + self.assertNumContains(sql, "tbl_tbsp", 1) + # 1 for the index on the primary key + self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 1) + else: + # 1 for the table + 1 for the index on the primary key + self.assertNumContains(sql, "tbl_tbsp", 2) + + @skipIfDBFeature("supports_tablespaces") + def test_tablespace_ignored_for_model(self): + # No tablespace-related SQL + self.assertEqual(sql_for_table(Scientist), sql_for_table(ScientistRef)) + + @skipUnlessDBFeature("supports_tablespaces") + def test_tablespace_for_indexed_field(self): + sql = sql_for_table(Article).lower() + if settings.DEFAULT_INDEX_TABLESPACE: + # 1 for the table + self.assertNumContains(sql, "tbl_tbsp", 1) + # 1 for the primary key + 1 for the index on code + self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 2) + else: + # 1 for the table + 1 for the primary key + 1 for the index on code + self.assertNumContains(sql, "tbl_tbsp", 3) + + # 1 for the index on reference + self.assertNumContains(sql, "idx_tbsp", 1) + + @skipIfDBFeature("supports_tablespaces") + def test_tablespace_ignored_for_indexed_field(self): + # No tablespace-related SQL + 
self.assertEqual(sql_for_table(Article), sql_for_table(ArticleRef)) + + @skipUnlessDBFeature("supports_tablespaces") + def test_tablespace_for_many_to_many_field(self): + sql = sql_for_table(Authors).lower() + # The join table of the ManyToManyField goes to the model's tablespace, + # and its indexes too, unless DEFAULT_INDEX_TABLESPACE is set. + if settings.DEFAULT_INDEX_TABLESPACE: + # 1 for the table + self.assertNumContains(sql, "tbl_tbsp", 1) + # 1 for the primary key + self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 1) + else: + # 1 for the table + 1 for the index on the primary key + self.assertNumContains(sql, "tbl_tbsp", 2) + self.assertNumContains(sql, "idx_tbsp", 0) + + sql = sql_for_index(Authors).lower() + # The ManyToManyField declares no db_tablespace, its indexes go to + # the model's tablespace, unless DEFAULT_INDEX_TABLESPACE is set. + if settings.DEFAULT_INDEX_TABLESPACE: + self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 2) + else: + self.assertNumContains(sql, "tbl_tbsp", 2) + self.assertNumContains(sql, "idx_tbsp", 0) + + sql = sql_for_table(Reviewers).lower() + # The join table of the ManyToManyField goes to the model's tablespace, + # and its indexes too, unless DEFAULT_INDEX_TABLESPACE is set. + if settings.DEFAULT_INDEX_TABLESPACE: + # 1 for the table + self.assertNumContains(sql, "tbl_tbsp", 1) + # 1 for the primary key + self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 1) + else: + # 1 for the table + 1 for the index on the primary key + self.assertNumContains(sql, "tbl_tbsp", 2) + self.assertNumContains(sql, "idx_tbsp", 0) + + sql = sql_for_index(Reviewers).lower() + # The ManyToManyField declares db_tablespace, its indexes go there. 
+ self.assertNumContains(sql, "tbl_tbsp", 0) + self.assertNumContains(sql, "idx_tbsp", 2) diff --git a/testbed/django__django/tests/model_package/__init__.py b/testbed/django__django/tests/model_package/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_package/models/__init__.py b/testbed/django__django/tests/model_package/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b4a637f6eb7141293fc76f894cf5fbaf7d51f48e --- /dev/null +++ b/testbed/django__django/tests/model_package/models/__init__.py @@ -0,0 +1,5 @@ +# Import all the models from subpackages +from .article import Article +from .publication import Publication + +__all__ = ["Article", "Publication"] diff --git a/testbed/django__django/tests/model_package/models/article.py b/testbed/django__django/tests/model_package/models/article.py new file mode 100644 index 0000000000000000000000000000000000000000..f664dc08c5f4f5196ed8ed27f0b0711743d3c2bd --- /dev/null +++ b/testbed/django__django/tests/model_package/models/article.py @@ -0,0 +1,11 @@ +from django.db import models + + +class Site(models.Model): + name = models.CharField(max_length=100) + + +class Article(models.Model): + sites = models.ManyToManyField(Site) + headline = models.CharField(max_length=100) + publications = models.ManyToManyField("model_package.Publication", blank=True) diff --git a/testbed/django__django/tests/model_package/models/publication.py b/testbed/django__django/tests/model_package/models/publication.py new file mode 100644 index 0000000000000000000000000000000000000000..b0a49a94a19a15eac9bb647299826f0dc608dcd9 --- /dev/null +++ b/testbed/django__django/tests/model_package/models/publication.py @@ -0,0 +1,5 @@ +from django.db import models + + +class Publication(models.Model): + title = models.CharField(max_length=30) diff --git 
a/testbed/django__django/tests/model_package/tests.py b/testbed/django__django/tests/model_package/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..aa625465a6a0618f1d18fea190e806a5c12a6020 --- /dev/null +++ b/testbed/django__django/tests/model_package/tests.py @@ -0,0 +1,74 @@ +from django.db import connection, models +from django.db.backends.utils import truncate_name +from django.test import TestCase + +from .models.article import Article, Site +from .models.publication import Publication + + +class Advertisement(models.Model): + customer = models.CharField(max_length=100) + publications = models.ManyToManyField("model_package.Publication", blank=True) + + +class ModelPackageTests(TestCase): + def test_m2m_tables_in_subpackage_models(self): + """ + Regression for #12168: models split into subpackages still get M2M + tables. + """ + p = Publication.objects.create(title="FooBar") + + site = Site.objects.create(name="example.com") + + a = Article.objects.create(headline="a foo headline") + a.publications.add(p) + a.sites.add(site) + + a = Article.objects.get(id=a.pk) + self.assertEqual(a.id, a.pk) + self.assertEqual(a.sites.count(), 1) + + def test_models_in_the_test_package(self): + """ + Regression for #12245 - Models can exist in the test package, too. + """ + p = Publication.objects.create(title="FooBar") + ad = Advertisement.objects.create(customer="Lawrence Journal-World") + ad.publications.add(p) + + ad = Advertisement.objects.get(id=ad.pk) + self.assertEqual(ad.publications.count(), 1) + + def test_automatic_m2m_column_names(self): + """ + Regression for #12386 - field names on the autogenerated intermediate + class that are specified as dotted strings don't retain any path + component for the field or column name. 
+ """ + self.assertEqual(Article.publications.through._meta.fields[1].name, "article") + self.assertEqual( + Article.publications.through._meta.fields[1].get_attname_column(), + ("article_id", "article_id"), + ) + self.assertEqual( + Article.publications.through._meta.fields[2].name, "publication" + ) + self.assertEqual( + Article.publications.through._meta.fields[2].get_attname_column(), + ("publication_id", "publication_id"), + ) + + self.assertEqual( + Article._meta.get_field("publications").m2m_db_table(), + truncate_name( + "model_package_article_publications", connection.ops.max_name_length() + ), + ) + + self.assertEqual( + Article._meta.get_field("publications").m2m_column_name(), "article_id" + ) + self.assertEqual( + Article._meta.get_field("publications").m2m_reverse_name(), "publication_id" + ) diff --git a/testbed/django__django/tests/model_regress/__init__.py b/testbed/django__django/tests/model_regress/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_regress/models.py b/testbed/django__django/tests/model_regress/models.py new file mode 100644 index 0000000000000000000000000000000000000000..350850393a2eccf345b4bb2439aa391f04b086cc --- /dev/null +++ b/testbed/django__django/tests/model_regress/models.py @@ -0,0 +1,62 @@ +from django.db import models + + +class Article(models.Model): + CHOICES = ( + (1, "first"), + (2, "second"), + ) + headline = models.CharField(max_length=100, default="Default headline") + pub_date = models.DateTimeField() + status = models.IntegerField(blank=True, null=True, choices=CHOICES) + misc_data = models.CharField(max_length=100, blank=True) + article_text = models.TextField() + + class Meta: + ordering = ("pub_date", "headline") + # A utf-8 verbose name (Ångström's Articles) to test they are valid. 
+ verbose_name = "\xc3\x85ngstr\xc3\xb6m's Articles" + + +class Movie(models.Model): + # Test models with non-default primary keys / AutoFields #5218 + movie_id = models.AutoField(primary_key=True) + name = models.CharField(max_length=60) + + +class Party(models.Model): + when = models.DateField(null=True) + + +class Event(models.Model): + when = models.DateTimeField() + + +class Department(models.Model): + id = models.PositiveIntegerField(primary_key=True) + name = models.CharField(max_length=200) + + +class Worker(models.Model): + department = models.ForeignKey(Department, models.CASCADE) + name = models.CharField(max_length=200) + + def __str__(self): + return self.name + + +class NonAutoPK(models.Model): + name = models.CharField(max_length=10, primary_key=True) + + +# Chained foreign keys with to_field produce incorrect query #18432 +class Model1(models.Model): + pkey = models.IntegerField(unique=True, db_index=True) + + +class Model2(models.Model): + model1 = models.ForeignKey(Model1, models.CASCADE, unique=True, to_field="pkey") + + +class Model3(models.Model): + model2 = models.ForeignKey(Model2, models.CASCADE, unique=True, to_field="model1") diff --git a/testbed/django__django/tests/model_regress/test_pickle.py b/testbed/django__django/tests/model_regress/test_pickle.py new file mode 100644 index 0000000000000000000000000000000000000000..1beebc6330cc5af8163f20391d4daa5e54844505 --- /dev/null +++ b/testbed/django__django/tests/model_regress/test_pickle.py @@ -0,0 +1,68 @@ +import pickle + +import django +from django.db import DJANGO_VERSION_PICKLE_KEY, models +from django.test import SimpleTestCase + + +class ModelPickleTests(SimpleTestCase): + def test_missing_django_version_unpickling(self): + """ + #21430 -- Verifies a warning is raised for models that are + unpickled without a Django version + """ + + class MissingDjangoVersion(models.Model): + title = models.CharField(max_length=10) + + def __reduce__(self): + reduce_list = super().__reduce__() + data 
= reduce_list[-1] + del data[DJANGO_VERSION_PICKLE_KEY] + return reduce_list + + p = MissingDjangoVersion(title="FooBar") + msg = "Pickled model instance's Django version is not specified." + with self.assertRaisesMessage(RuntimeWarning, msg): + pickle.loads(pickle.dumps(p)) + + def test_unsupported_unpickle(self): + """ + #21430 -- Verifies a warning is raised for models that are + unpickled with a different Django version than the current + """ + + class DifferentDjangoVersion(models.Model): + title = models.CharField(max_length=10) + + def __reduce__(self): + reduce_list = super().__reduce__() + data = reduce_list[-1] + data[DJANGO_VERSION_PICKLE_KEY] = "1.0" + return reduce_list + + p = DifferentDjangoVersion(title="FooBar") + msg = ( + "Pickled model instance's Django version 1.0 does not match the " + "current version %s." % django.__version__ + ) + with self.assertRaisesMessage(RuntimeWarning, msg): + pickle.loads(pickle.dumps(p)) + + def test_with_getstate(self): + """ + A model may override __getstate__() to choose the attributes to pickle. 
+ """ + + class PickledModel(models.Model): + def __getstate__(self): + state = super().__getstate__().copy() + del state["dont_pickle"] + return state + + m = PickledModel() + m.dont_pickle = 1 + dumped = pickle.dumps(m) + self.assertEqual(m.dont_pickle, 1) + reloaded = pickle.loads(dumped) + self.assertFalse(hasattr(reloaded, "dont_pickle")) diff --git a/testbed/django__django/tests/model_regress/test_state.py b/testbed/django__django/tests/model_regress/test_state.py new file mode 100644 index 0000000000000000000000000000000000000000..8b0f08782919c7a203bae0f0096221045a321498 --- /dev/null +++ b/testbed/django__django/tests/model_regress/test_state.py @@ -0,0 +1,7 @@ +from django.db.models.base import ModelState, ModelStateFieldsCacheDescriptor +from django.test import SimpleTestCase + + +class ModelStateTests(SimpleTestCase): + def test_fields_cache_descriptor(self): + self.assertIsInstance(ModelState.fields_cache, ModelStateFieldsCacheDescriptor) diff --git a/testbed/django__django/tests/model_regress/tests.py b/testbed/django__django/tests/model_regress/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..7feab480dd6415636dd07d48a2a4d7cfb20135d5 --- /dev/null +++ b/testbed/django__django/tests/model_regress/tests.py @@ -0,0 +1,283 @@ +import copy +import datetime +from operator import attrgetter + +from django.core.exceptions import ValidationError +from django.db import models, router +from django.db.models.sql import InsertQuery +from django.test import TestCase, skipUnlessDBFeature +from django.test.utils import isolate_apps +from django.utils.timezone import get_fixed_timezone + +from .models import ( + Article, + Department, + Event, + Model1, + Model2, + Model3, + NonAutoPK, + Party, + Worker, +) + + +class ModelTests(TestCase): + def test_model_init_too_many_args(self): + msg = "Number of args exceeds number of fields" + with self.assertRaisesMessage(IndexError, msg): + Worker(1, 2, 3, 4) + + # The bug is that the following 
queries would raise: + # "TypeError: Related Field has invalid lookup: gte" + def test_related_gte_lookup(self): + """ + Regression test for #10153: foreign key __gte lookups. + """ + Worker.objects.filter(department__gte=0) + + def test_related_lte_lookup(self): + """ + Regression test for #10153: foreign key __lte lookups. + """ + Worker.objects.filter(department__lte=0) + + def test_sql_insert_compiler_return_id_attribute(self): + """ + Regression test for #14019: SQLInsertCompiler.as_sql() failure + """ + db = router.db_for_write(Party) + query = InsertQuery(Party) + query.insert_values([Party._meta.fields[0]], [], raw=False) + # this line will raise an AttributeError without the accompanying fix + query.get_compiler(using=db).as_sql() + + def test_empty_choice(self): + # NOTE: Part of the regression test here is merely parsing the model + # declaration. The verbose_name, in particular, did not always work. + a = Article.objects.create( + headline="Look at me!", pub_date=datetime.datetime.now() + ) + # An empty choice field should return None for the display name. + self.assertIs(a.get_status_display(), None) + + # Empty strings should be returned as string + a = Article.objects.get(pk=a.pk) + self.assertEqual(a.misc_data, "") + + def test_long_textfield(self): + # TextFields can hold more than 4000 characters (this was broken in + # Oracle). 
+ a = Article.objects.create( + headline="Really, really big", + pub_date=datetime.datetime.now(), + article_text="ABCDE" * 1000, + ) + a = Article.objects.get(pk=a.pk) + self.assertEqual(len(a.article_text), 5000) + + def test_long_unicode_textfield(self): + # TextFields can hold more than 4000 bytes also when they are + # less than 4000 characters + a = Article.objects.create( + headline="Really, really big", + pub_date=datetime.datetime.now(), + article_text="\u05d0\u05d1\u05d2" * 1000, + ) + a = Article.objects.get(pk=a.pk) + self.assertEqual(len(a.article_text), 3000) + + def test_date_lookup(self): + # Regression test for #659 + Party.objects.create(when=datetime.datetime(1999, 12, 31)) + Party.objects.create(when=datetime.datetime(1998, 12, 31)) + Party.objects.create(when=datetime.datetime(1999, 1, 1)) + Party.objects.create(when=datetime.datetime(1, 3, 3)) + self.assertQuerySetEqual(Party.objects.filter(when__month=2), []) + self.assertQuerySetEqual( + Party.objects.filter(when__month=1), + [datetime.date(1999, 1, 1)], + attrgetter("when"), + ) + self.assertQuerySetEqual( + Party.objects.filter(when__month=12), + [ + datetime.date(1999, 12, 31), + datetime.date(1998, 12, 31), + ], + attrgetter("when"), + ordered=False, + ) + self.assertQuerySetEqual( + Party.objects.filter(when__year=1998), + [ + datetime.date(1998, 12, 31), + ], + attrgetter("when"), + ) + # Regression test for #8510 + self.assertQuerySetEqual( + Party.objects.filter(when__day="31"), + [ + datetime.date(1999, 12, 31), + datetime.date(1998, 12, 31), + ], + attrgetter("when"), + ordered=False, + ) + self.assertQuerySetEqual( + Party.objects.filter(when__month="12"), + [ + datetime.date(1999, 12, 31), + datetime.date(1998, 12, 31), + ], + attrgetter("when"), + ordered=False, + ) + self.assertQuerySetEqual( + Party.objects.filter(when__year="1998"), + [ + datetime.date(1998, 12, 31), + ], + attrgetter("when"), + ) + + # Regression test for #18969 + self.assertQuerySetEqual( + 
Party.objects.filter(when__year=1), + [ + datetime.date(1, 3, 3), + ], + attrgetter("when"), + ) + self.assertQuerySetEqual( + Party.objects.filter(when__year="1"), + [ + datetime.date(1, 3, 3), + ], + attrgetter("when"), + ) + + def test_date_filter_null(self): + # Date filtering was failing with NULL date values in SQLite + # (regression test for #3501, among other things). + Party.objects.create(when=datetime.datetime(1999, 1, 1)) + Party.objects.create() + p = Party.objects.filter(when__month=1)[0] + self.assertEqual(p.when, datetime.date(1999, 1, 1)) + self.assertQuerySetEqual( + Party.objects.filter(pk=p.pk).dates("when", "month"), + [1], + attrgetter("month"), + ) + + def test_get_next_prev_by_field(self): + # get_next_by_FIELD() and get_previous_by_FIELD() don't crash when + # microseconds values are stored in the database. + Event.objects.create(when=datetime.datetime(2000, 1, 1, 16, 0, 0)) + Event.objects.create(when=datetime.datetime(2000, 1, 1, 6, 1, 1)) + Event.objects.create(when=datetime.datetime(2000, 1, 1, 13, 1, 1)) + e = Event.objects.create(when=datetime.datetime(2000, 1, 1, 12, 0, 20, 24)) + self.assertEqual( + e.get_next_by_when().when, datetime.datetime(2000, 1, 1, 13, 1, 1) + ) + self.assertEqual( + e.get_previous_by_when().when, datetime.datetime(2000, 1, 1, 6, 1, 1) + ) + + def test_get_next_prev_by_field_unsaved(self): + msg = "get_next/get_previous cannot be used on unsaved objects." + with self.assertRaisesMessage(ValueError, msg): + Event().get_next_by_when() + with self.assertRaisesMessage(ValueError, msg): + Event().get_previous_by_when() + + def test_primary_key_foreign_key_types(self): + # Check Department and Worker (non-default PK type) + d = Department.objects.create(id=10, name="IT") + w = Worker.objects.create(department=d, name="Full-time") + self.assertEqual(str(w), "Full-time") + + @skipUnlessDBFeature("supports_timezones") + def test_timezones(self): + # Saving and updating with timezone-aware datetime Python objects. 
+ # Regression test for #10443. + # The idea is that all these creations and saving should work without + # crashing. It's not rocket science. + dt1 = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=get_fixed_timezone(600)) + dt2 = datetime.datetime(2008, 8, 31, 17, 20, tzinfo=get_fixed_timezone(600)) + obj = Article.objects.create( + headline="A headline", pub_date=dt1, article_text="foo" + ) + obj.pub_date = dt2 + obj.save() + self.assertEqual( + Article.objects.filter(headline="A headline").update(pub_date=dt1), 1 + ) + + def test_chained_fks(self): + """ + Chained foreign keys with to_field produce incorrect query. + """ + + m1 = Model1.objects.create(pkey=1000) + m2 = Model2.objects.create(model1=m1) + m3 = Model3.objects.create(model2=m2) + + # this is the actual test for #18432 + m3 = Model3.objects.get(model2=1000) + m3.model2 + + @isolate_apps("model_regress") + def test_metaclass_can_access_attribute_dict(self): + """ + Model metaclasses have access to the class attribute dict in + __init__() (#30254). 
+ """ + + class HorseBase(models.base.ModelBase): + def __init__(cls, name, bases, attrs): + super().__init__(name, bases, attrs) + cls.horns = 1 if "magic" in attrs else 0 + + class Horse(models.Model, metaclass=HorseBase): + name = models.CharField(max_length=255) + magic = True + + self.assertEqual(Horse.horns, 1) + + +class ModelValidationTest(TestCase): + def test_pk_validation(self): + NonAutoPK.objects.create(name="one") + again = NonAutoPK(name="one") + with self.assertRaises(ValidationError): + again.validate_unique() + + +class EvaluateMethodTest(TestCase): + """ + Regression test for #13640: cannot filter by objects with 'evaluate' attr + """ + + def test_model_with_evaluate_method(self): + """ + You can filter by objects that have an 'evaluate' attr + """ + dept = Department.objects.create(pk=1, name="abc") + dept.evaluate = "abc" + Worker.objects.filter(department=dept) + + +class ModelFieldsCacheTest(TestCase): + def test_fields_cache_reset_on_copy(self): + department1 = Department.objects.create(id=1, name="department1") + department2 = Department.objects.create(id=2, name="department2") + worker1 = Worker.objects.create(name="worker", department=department1) + worker2 = copy.copy(worker1) + + self.assertEqual(worker2.department, department1) + # Changing related fields doesn't mutate the base object. 
+ worker2.department = department2 + self.assertEqual(worker2.department, department2) + self.assertEqual(worker1.department, department1) diff --git a/testbed/django__django/tests/model_utils/__init__.py b/testbed/django__django/tests/model_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/model_utils/tests.py b/testbed/django__django/tests/model_utils/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..4f1db84f3b03f7d88b7c254a8351d32e14ce8b8c --- /dev/null +++ b/testbed/django__django/tests/model_utils/tests.py @@ -0,0 +1,10 @@ +from django.db.models.utils import create_namedtuple_class +from django.test import SimpleTestCase + + +class NamedTupleClassTests(SimpleTestCase): + def test_immutability(self): + row_class = create_namedtuple_class("field1", "field2") + row = row_class("value1", "value2") + with self.assertRaises(AttributeError): + row.field3 = "value3" diff --git a/testbed/django__django/tests/modeladmin/__init__.py b/testbed/django__django/tests/modeladmin/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/modeladmin/models.py b/testbed/django__django/tests/modeladmin/models.py new file mode 100644 index 0000000000000000000000000000000000000000..51ebca768c50b154b798c3379292aae13e8b35d8 --- /dev/null +++ b/testbed/django__django/tests/modeladmin/models.py @@ -0,0 +1,56 @@ +from django.contrib.auth.models import User +from django.db import models + + +class Band(models.Model): + name = models.CharField(max_length=100) + bio = models.TextField() + sign_date = models.DateField() + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + +class Song(models.Model): + name = models.CharField(max_length=100) + band = models.ForeignKey(Band, models.CASCADE) + featuring = 
models.ManyToManyField(Band, related_name="featured") + + def __str__(self): + return self.name + + +class Concert(models.Model): + main_band = models.ForeignKey(Band, models.CASCADE, related_name="main_concerts") + opening_band = models.ForeignKey( + Band, models.CASCADE, related_name="opening_concerts", blank=True + ) + day = models.CharField(max_length=3, choices=((1, "Fri"), (2, "Sat"))) + transport = models.CharField( + max_length=100, choices=((1, "Plane"), (2, "Train"), (3, "Bus")), blank=True + ) + + +class ValidationTestModel(models.Model): + name = models.CharField(max_length=100) + slug = models.SlugField() + users = models.ManyToManyField(User) + state = models.CharField( + max_length=2, choices=(("CO", "Colorado"), ("WA", "Washington")) + ) + is_active = models.BooleanField(default=False) + pub_date = models.DateTimeField() + band = models.ForeignKey(Band, models.CASCADE) + best_friend = models.OneToOneField(User, models.CASCADE, related_name="best_friend") + # This field is intentionally 2 characters long (#16080). 
+ no = models.IntegerField(verbose_name="Number", blank=True, null=True) + + def decade_published_in(self): + return self.pub_date.strftime("%Y")[:3] + "0's" + + +class ValidationTestInlineModel(models.Model): + parent = models.ForeignKey(ValidationTestModel, models.CASCADE) diff --git a/testbed/django__django/tests/modeladmin/test_actions.py b/testbed/django__django/tests/modeladmin/test_actions.py new file mode 100644 index 0000000000000000000000000000000000000000..fa3108ce08ad254373607d71f53933fa133500d2 --- /dev/null +++ b/testbed/django__django/tests/modeladmin/test_actions.py @@ -0,0 +1,150 @@ +from django.contrib import admin +from django.contrib.auth.models import Permission, User +from django.contrib.contenttypes.models import ContentType +from django.test import TestCase + +from .models import Band + + +class AdminActionsTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.superuser = User.objects.create_superuser( + username="super", password="secret", email="super@example.com" + ) + content_type = ContentType.objects.get_for_model(Band) + Permission.objects.create( + name="custom", codename="custom_band", content_type=content_type + ) + for user_type in ("view", "add", "change", "delete", "custom"): + username = "%suser" % user_type + user = User.objects.create_user( + username=username, password="secret", is_staff=True + ) + permission = Permission.objects.get( + codename="%s_band" % user_type, content_type=content_type + ) + user.user_permissions.add(permission) + setattr(cls, username, user) + + def test_get_actions_respects_permissions(self): + class MockRequest: + pass + + class BandAdmin(admin.ModelAdmin): + actions = ["custom_action"] + + @admin.action + def custom_action(modeladmin, request, queryset): + pass + + def has_custom_permission(self, request): + return request.user.has_perm("%s.custom_band" % self.opts.app_label) + + ma = BandAdmin(Band, admin.AdminSite()) + mock_request = MockRequest() + mock_request.GET = {} + cases = [ + 
(None, self.viewuser, ["custom_action"]), + ("view", self.superuser, ["delete_selected", "custom_action"]), + ("view", self.viewuser, ["custom_action"]), + ("add", self.adduser, ["custom_action"]), + ("change", self.changeuser, ["custom_action"]), + ("delete", self.deleteuser, ["delete_selected", "custom_action"]), + ("custom", self.customuser, ["custom_action"]), + ] + for permission, user, expected in cases: + with self.subTest(permission=permission, user=user): + if permission is None: + if hasattr(BandAdmin.custom_action, "allowed_permissions"): + del BandAdmin.custom_action.allowed_permissions + else: + BandAdmin.custom_action.allowed_permissions = (permission,) + mock_request.user = user + actions = ma.get_actions(mock_request) + self.assertEqual(list(actions.keys()), expected) + + def test_actions_inheritance(self): + class AdminBase(admin.ModelAdmin): + actions = ["custom_action"] + + @admin.action + def custom_action(modeladmin, request, queryset): + pass + + class AdminA(AdminBase): + pass + + class AdminB(AdminBase): + actions = None + + ma1 = AdminA(Band, admin.AdminSite()) + action_names = [name for _, name, _ in ma1._get_base_actions()] + self.assertEqual(action_names, ["delete_selected", "custom_action"]) + # `actions = None` removes actions from superclasses. 
+ ma2 = AdminB(Band, admin.AdminSite()) + action_names = [name for _, name, _ in ma2._get_base_actions()] + self.assertEqual(action_names, ["delete_selected"]) + + def test_global_actions_description(self): + @admin.action(description="Site-wide admin action 1.") + def global_action_1(modeladmin, request, queryset): + pass + + @admin.action + def global_action_2(modeladmin, request, queryset): + pass + + admin_site = admin.AdminSite() + admin_site.add_action(global_action_1) + admin_site.add_action(global_action_2) + + class BandAdmin(admin.ModelAdmin): + pass + + ma = BandAdmin(Band, admin_site) + self.assertEqual( + [description for _, _, description in ma._get_base_actions()], + [ + "Delete selected %(verbose_name_plural)s", + "Site-wide admin action 1.", + "Global action 2", + ], + ) + + def test_actions_replace_global_action(self): + @admin.action(description="Site-wide admin action 1.") + def global_action_1(modeladmin, request, queryset): + pass + + @admin.action(description="Site-wide admin action 2.") + def global_action_2(modeladmin, request, queryset): + pass + + admin.site.add_action(global_action_1, name="custom_action_1") + admin.site.add_action(global_action_2, name="custom_action_2") + + @admin.action(description="Local admin action 1.") + def custom_action_1(modeladmin, request, queryset): + pass + + class BandAdmin(admin.ModelAdmin): + actions = [custom_action_1, "custom_action_2"] + + @admin.action(description="Local admin action 2.") + def custom_action_2(self, request, queryset): + pass + + ma = BandAdmin(Band, admin.site) + self.assertEqual(ma.check(), []) + self.assertEqual( + [ + desc + for _, name, desc in ma._get_base_actions() + if name.startswith("custom_action") + ], + [ + "Local admin action 1.", + "Local admin action 2.", + ], + ) diff --git a/testbed/django__django/tests/modeladmin/test_checks.py b/testbed/django__django/tests/modeladmin/test_checks.py new file mode 100644 index 
0000000000000000000000000000000000000000..2ed27f8a3dde3262f85477a4b6e0cb9921c867c4 --- /dev/null +++ b/testbed/django__django/tests/modeladmin/test_checks.py @@ -0,0 +1,1673 @@ +from django import forms +from django.contrib import admin +from django.contrib.admin import BooleanFieldListFilter, SimpleListFilter +from django.contrib.admin.options import VERTICAL, ModelAdmin, TabularInline +from django.contrib.admin.sites import AdminSite +from django.core.checks import Error +from django.db.models import CASCADE, F, Field, ForeignKey, ManyToManyField, Model +from django.db.models.functions import Upper +from django.forms.models import BaseModelFormSet +from django.test import SimpleTestCase +from django.test.utils import isolate_apps + +from .models import Band, Song, User, ValidationTestInlineModel, ValidationTestModel + + +class CheckTestCase(SimpleTestCase): + def assertIsInvalid( + self, + model_admin, + model, + msg, + id=None, + hint=None, + invalid_obj=None, + admin_site=None, + ): + if admin_site is None: + admin_site = AdminSite() + invalid_obj = invalid_obj or model_admin + admin_obj = model_admin(model, admin_site) + self.assertEqual( + admin_obj.check(), [Error(msg, hint=hint, obj=invalid_obj, id=id)] + ) + + def assertIsInvalidRegexp( + self, model_admin, model, msg, id=None, hint=None, invalid_obj=None + ): + """ + Same as assertIsInvalid but treats the given msg as a regexp. 
+ """ + invalid_obj = invalid_obj or model_admin + admin_obj = model_admin(model, AdminSite()) + errors = admin_obj.check() + self.assertEqual(len(errors), 1) + error = errors[0] + self.assertEqual(error.hint, hint) + self.assertEqual(error.obj, invalid_obj) + self.assertEqual(error.id, id) + self.assertRegex(error.msg, msg) + + def assertIsValid(self, model_admin, model, admin_site=None): + if admin_site is None: + admin_site = AdminSite() + admin_obj = model_admin(model, admin_site) + self.assertEqual(admin_obj.check(), []) + + +class RawIdCheckTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + raw_id_fields = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'raw_id_fields' must be a list or tuple.", + "admin.E001", + ) + + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + raw_id_fields = ("non_existent_field",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'raw_id_fields[0]' refers to 'non_existent_field', " + "which is not a field of 'modeladmin.ValidationTestModel'.", + "admin.E002", + ) + + def test_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + raw_id_fields = ("name",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'raw_id_fields[0]' must be a foreign key or a " + "many-to-many field.", + "admin.E003", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + raw_id_fields = ("users",) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_field_attname(self): + class TestModelAdmin(ModelAdmin): + raw_id_fields = ["band_id"] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'raw_id_fields[0]' refers to 'band_id', which is " + "not a field of 'modeladmin.ValidationTestModel'.", + "admin.E002", + ) + + +class FieldsetsCheckTests(CheckTestCase): + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + 
fieldsets = (("General", {"fields": ("name",)}),) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + fieldsets = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'fieldsets' must be a list or tuple.", + "admin.E007", + ) + + def test_non_iterable_item(self): + class TestModelAdmin(ModelAdmin): + fieldsets = ({},) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'fieldsets[0]' must be a list or tuple.", + "admin.E008", + ) + + def test_item_not_a_pair(self): + class TestModelAdmin(ModelAdmin): + fieldsets = ((),) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'fieldsets[0]' must be of length 2.", + "admin.E009", + ) + + def test_second_element_of_item_not_a_dict(self): + class TestModelAdmin(ModelAdmin): + fieldsets = (("General", ()),) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'fieldsets[0][1]' must be a dictionary.", + "admin.E010", + ) + + def test_missing_fields_key(self): + class TestModelAdmin(ModelAdmin): + fieldsets = (("General", {}),) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'fieldsets[0][1]' must contain the key 'fields'.", + "admin.E011", + ) + + class TestModelAdmin(ModelAdmin): + fieldsets = (("General", {"fields": ("name",)}),) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_specified_both_fields_and_fieldsets(self): + class TestModelAdmin(ModelAdmin): + fieldsets = (("General", {"fields": ("name",)}),) + fields = ["name"] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "Both 'fieldsets' and 'fields' are specified.", + "admin.E005", + ) + + def test_duplicate_fields(self): + class TestModelAdmin(ModelAdmin): + fieldsets = [(None, {"fields": ["name", "name"]})] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "There are 
duplicate field(s) in 'fieldsets[0][1]'.", + "admin.E012", + ) + + def test_duplicate_fields_in_fieldsets(self): + class TestModelAdmin(ModelAdmin): + fieldsets = [ + (None, {"fields": ["name"]}), + (None, {"fields": ["name"]}), + ] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "There are duplicate field(s) in 'fieldsets[1][1]'.", + "admin.E012", + ) + + def test_fieldsets_with_custom_form_validation(self): + class BandAdmin(ModelAdmin): + fieldsets = (("Band", {"fields": ("name",)}),) + + self.assertIsValid(BandAdmin, Band) + + +class FieldsCheckTests(CheckTestCase): + def test_duplicate_fields_in_fields(self): + class TestModelAdmin(ModelAdmin): + fields = ["name", "name"] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'fields' contains duplicate field(s).", + "admin.E006", + ) + + def test_inline(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + fields = 10 + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'fields' must be a list or tuple.", + "admin.E004", + invalid_obj=ValidationTestInline, + ) + + +class FormCheckTests(CheckTestCase): + def test_invalid_type(self): + class FakeForm: + pass + + class TestModelAdmin(ModelAdmin): + form = FakeForm + + class TestModelAdminWithNoForm(ModelAdmin): + form = "not a form" + + for model_admin in (TestModelAdmin, TestModelAdminWithNoForm): + with self.subTest(model_admin): + self.assertIsInvalid( + model_admin, + ValidationTestModel, + "The value of 'form' must inherit from 'BaseModelForm'.", + "admin.E016", + ) + + def test_fieldsets_with_custom_form_validation(self): + class BandAdmin(ModelAdmin): + fieldsets = (("Band", {"fields": ("name",)}),) + + self.assertIsValid(BandAdmin, Band) + + def test_valid_case(self): + class AdminBandForm(forms.ModelForm): + delete = forms.BooleanField() + + class 
BandAdmin(ModelAdmin): + form = AdminBandForm + fieldsets = (("Band", {"fields": ("name", "bio", "sign_date", "delete")}),) + + self.assertIsValid(BandAdmin, Band) + + +class FilterVerticalCheckTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + filter_vertical = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'filter_vertical' must be a list or tuple.", + "admin.E017", + ) + + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + filter_vertical = ("non_existent_field",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'filter_vertical[0]' refers to 'non_existent_field', " + "which is not a field of 'modeladmin.ValidationTestModel'.", + "admin.E019", + ) + + def test_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + filter_vertical = ("name",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'filter_vertical[0]' must be a many-to-many field.", + "admin.E020", + ) + + @isolate_apps("modeladmin") + def test_invalid_m2m_field_with_through(self): + class Artist(Model): + bands = ManyToManyField("Band", through="BandArtist") + + class BandArtist(Model): + artist = ForeignKey("Artist", on_delete=CASCADE) + band = ForeignKey("Band", on_delete=CASCADE) + + class TestModelAdmin(ModelAdmin): + filter_vertical = ["bands"] + + self.assertIsInvalid( + TestModelAdmin, + Artist, + "The value of 'filter_vertical[0]' cannot include the ManyToManyField " + "'bands', because that field manually specifies a relationship model.", + "admin.E013", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + filter_vertical = ("users",) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class FilterHorizontalCheckTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + filter_horizontal = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + 
"The value of 'filter_horizontal' must be a list or tuple.", + "admin.E018", + ) + + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + filter_horizontal = ("non_existent_field",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'filter_horizontal[0]' refers to 'non_existent_field', " + "which is not a field of 'modeladmin.ValidationTestModel'.", + "admin.E019", + ) + + def test_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + filter_horizontal = ("name",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'filter_horizontal[0]' must be a many-to-many field.", + "admin.E020", + ) + + @isolate_apps("modeladmin") + def test_invalid_m2m_field_with_through(self): + class Artist(Model): + bands = ManyToManyField("Band", through="BandArtist") + + class BandArtist(Model): + artist = ForeignKey("Artist", on_delete=CASCADE) + band = ForeignKey("Band", on_delete=CASCADE) + + class TestModelAdmin(ModelAdmin): + filter_horizontal = ["bands"] + + self.assertIsInvalid( + TestModelAdmin, + Artist, + "The value of 'filter_horizontal[0]' cannot include the ManyToManyField " + "'bands', because that field manually specifies a relationship model.", + "admin.E013", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + filter_horizontal = ("users",) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class RadioFieldsCheckTests(CheckTestCase): + def test_not_dictionary(self): + class TestModelAdmin(ModelAdmin): + radio_fields = () + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'radio_fields' must be a dictionary.", + "admin.E021", + ) + + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + radio_fields = {"non_existent_field": VERTICAL} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'radio_fields' refers to 'non_existent_field', " + "which is not a field of 
'modeladmin.ValidationTestModel'.", + "admin.E022", + ) + + def test_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + radio_fields = {"name": VERTICAL} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'radio_fields' refers to 'name', which is not an instance " + "of ForeignKey, and does not have a 'choices' definition.", + "admin.E023", + ) + + def test_invalid_value(self): + class TestModelAdmin(ModelAdmin): + radio_fields = {"state": None} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'radio_fields[\"state\"]' must be either admin.HORIZONTAL or " + "admin.VERTICAL.", + "admin.E024", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + radio_fields = {"state": VERTICAL} + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class PrepopulatedFieldsCheckTests(CheckTestCase): + def test_not_list_or_tuple(self): + class TestModelAdmin(ModelAdmin): + prepopulated_fields = {"slug": "test"} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'prepopulated_fields[\"slug\"]' must be a list or tuple.", + "admin.E029", + ) + + def test_not_dictionary(self): + class TestModelAdmin(ModelAdmin): + prepopulated_fields = () + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'prepopulated_fields' must be a dictionary.", + "admin.E026", + ) + + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + prepopulated_fields = {"non_existent_field": ("slug",)} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'prepopulated_fields' refers to 'non_existent_field', " + "which is not a field of 'modeladmin.ValidationTestModel'.", + "admin.E027", + ) + + def test_missing_field_again(self): + class TestModelAdmin(ModelAdmin): + prepopulated_fields = {"slug": ("non_existent_field",)} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 
'prepopulated_fields[\"slug\"][0]' refers to " + "'non_existent_field', which is not a field of " + "'modeladmin.ValidationTestModel'.", + "admin.E030", + ) + + def test_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + prepopulated_fields = {"users": ("name",)} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'prepopulated_fields' refers to 'users', which must not be " + "a DateTimeField, a ForeignKey, a OneToOneField, or a ManyToManyField.", + "admin.E028", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + prepopulated_fields = {"slug": ("name",)} + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_one_to_one_field(self): + class TestModelAdmin(ModelAdmin): + prepopulated_fields = {"best_friend": ("name",)} + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'prepopulated_fields' refers to 'best_friend', which must " + "not be a DateTimeField, a ForeignKey, a OneToOneField, or a " + "ManyToManyField.", + "admin.E028", + ) + + +class ListDisplayTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + list_display = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_display' must be a list or tuple.", + "admin.E107", + ) + + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + list_display = ("non_existent_field",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_display[0]' refers to 'non_existent_field', " + "which is not a callable, an attribute of 'TestModelAdmin', " + "or an attribute or method on 'modeladmin.ValidationTestModel'.", + "admin.E108", + ) + + def test_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + list_display = ("users",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_display[0]' must not be a many-to-many field or a " + "reverse foreign 
key.", + "admin.E109", + ) + + def test_invalid_reverse_related_field(self): + class TestModelAdmin(ModelAdmin): + list_display = ["song_set"] + + self.assertIsInvalid( + TestModelAdmin, + Band, + "The value of 'list_display[0]' must not be a many-to-many field or a " + "reverse foreign key.", + "admin.E109", + ) + + def test_invalid_related_field(self): + class TestModelAdmin(ModelAdmin): + list_display = ["song"] + + self.assertIsInvalid( + TestModelAdmin, + Band, + "The value of 'list_display[0]' must not be a many-to-many field or a " + "reverse foreign key.", + "admin.E109", + ) + + def test_invalid_m2m_related_name(self): + class TestModelAdmin(ModelAdmin): + list_display = ["featured"] + + self.assertIsInvalid( + TestModelAdmin, + Band, + "The value of 'list_display[0]' must not be a many-to-many field or a " + "reverse foreign key.", + "admin.E109", + ) + + def test_valid_case(self): + @admin.display + def a_callable(obj): + pass + + class TestModelAdmin(ModelAdmin): + @admin.display + def a_method(self, obj): + pass + + list_display = ("name", "decade_published_in", "a_method", a_callable) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_valid_field_accessible_via_instance(self): + class PositionField(Field): + """Custom field accessible only via instance.""" + + def contribute_to_class(self, cls, name): + super().contribute_to_class(cls, name) + setattr(cls, self.name, self) + + def __get__(self, instance, owner): + if instance is None: + raise AttributeError() + + class TestModel(Model): + field = PositionField() + + class TestModelAdmin(ModelAdmin): + list_display = ("field",) + + self.assertIsValid(TestModelAdmin, TestModel) + + +class ListDisplayLinksCheckTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + list_display_links = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_display_links' must be a list, a tuple, or None.", + "admin.E110", + ) + 
+ def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + list_display_links = ("non_existent_field",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + ( + "The value of 'list_display_links[0]' refers to " + "'non_existent_field', which is not defined in 'list_display'." + ), + "admin.E111", + ) + + def test_missing_in_list_display(self): + class TestModelAdmin(ModelAdmin): + list_display_links = ("name",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_display_links[0]' refers to 'name', which is not " + "defined in 'list_display'.", + "admin.E111", + ) + + def test_valid_case(self): + @admin.display + def a_callable(obj): + pass + + class TestModelAdmin(ModelAdmin): + @admin.display + def a_method(self, obj): + pass + + list_display = ("name", "decade_published_in", "a_method", a_callable) + list_display_links = ("name", "decade_published_in", "a_method", a_callable) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_None_is_valid_case(self): + class TestModelAdmin(ModelAdmin): + list_display_links = None + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_list_display_links_check_skipped_if_get_list_display_overridden(self): + """ + list_display_links check is skipped if get_list_display() is overridden. + """ + + class TestModelAdmin(ModelAdmin): + list_display_links = ["name", "subtitle"] + + def get_list_display(self, request): + pass + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_list_display_link_checked_for_list_tuple_if_get_list_display_overridden( + self, + ): + """ + list_display_links is checked for list/tuple/None even if + get_list_display() is overridden. 
+ """ + + class TestModelAdmin(ModelAdmin): + list_display_links = "non-list/tuple" + + def get_list_display(self, request): + pass + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_display_links' must be a list, a tuple, or None.", + "admin.E110", + ) + + +class ListFilterTests(CheckTestCase): + def test_list_filter_validation(self): + class TestModelAdmin(ModelAdmin): + list_filter = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter' must be a list or tuple.", + "admin.E112", + ) + + def test_not_list_filter_class(self): + class TestModelAdmin(ModelAdmin): + list_filter = ["RandomClass"] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0]' refers to 'RandomClass', which " + "does not refer to a Field.", + "admin.E116", + ) + + def test_callable(self): + def random_callable(): + pass + + class TestModelAdmin(ModelAdmin): + list_filter = [random_callable] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0]' must inherit from 'ListFilter'.", + "admin.E113", + ) + + def test_not_callable(self): + class TestModelAdmin(ModelAdmin): + list_filter = [[42, 42]] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0][1]' must inherit from 'FieldListFilter'.", + "admin.E115", + ) + + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + list_filter = ("non_existent_field",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0]' refers to 'non_existent_field', " + "which does not refer to a Field.", + "admin.E116", + ) + + def test_not_filter(self): + class RandomClass: + pass + + class TestModelAdmin(ModelAdmin): + list_filter = (RandomClass,) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0]' must inherit from 'ListFilter'.", + "admin.E113", + ) 
+ + def test_not_filter_again(self): + class RandomClass: + pass + + class TestModelAdmin(ModelAdmin): + list_filter = (("is_active", RandomClass),) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0][1]' must inherit from 'FieldListFilter'.", + "admin.E115", + ) + + def test_not_filter_again_again(self): + class AwesomeFilter(SimpleListFilter): + def get_title(self): + return "awesomeness" + + def get_choices(self, request): + return (("bit", "A bit awesome"), ("very", "Very awesome")) + + def get_queryset(self, cl, qs): + return qs + + class TestModelAdmin(ModelAdmin): + list_filter = (("is_active", AwesomeFilter),) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0][1]' must inherit from 'FieldListFilter'.", + "admin.E115", + ) + + def test_list_filter_is_func(self): + def get_filter(): + pass + + class TestModelAdmin(ModelAdmin): + list_filter = [get_filter] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0]' must inherit from 'ListFilter'.", + "admin.E113", + ) + + def test_not_associated_with_field_name(self): + class TestModelAdmin(ModelAdmin): + list_filter = (BooleanFieldListFilter,) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_filter[0]' must not inherit from 'FieldListFilter'.", + "admin.E114", + ) + + def test_valid_case(self): + class AwesomeFilter(SimpleListFilter): + def get_title(self): + return "awesomeness" + + def get_choices(self, request): + return (("bit", "A bit awesome"), ("very", "Very awesome")) + + def get_queryset(self, cl, qs): + return qs + + class TestModelAdmin(ModelAdmin): + list_filter = ( + "is_active", + AwesomeFilter, + ("is_active", BooleanFieldListFilter), + "no", + ) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class ListPerPageCheckTests(CheckTestCase): + def test_not_integer(self): + class TestModelAdmin(ModelAdmin): + 
list_per_page = "hello" + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_per_page' must be an integer.", + "admin.E118", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + list_per_page = 100 + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class ListMaxShowAllCheckTests(CheckTestCase): + def test_not_integer(self): + class TestModelAdmin(ModelAdmin): + list_max_show_all = "hello" + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_max_show_all' must be an integer.", + "admin.E119", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + list_max_show_all = 200 + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class SearchFieldsCheckTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + search_fields = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'search_fields' must be a list or tuple.", + "admin.E126", + ) + + +class DateHierarchyCheckTests(CheckTestCase): + def test_missing_field(self): + class TestModelAdmin(ModelAdmin): + date_hierarchy = "non_existent_field" + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'date_hierarchy' refers to 'non_existent_field', " + "which does not refer to a Field.", + "admin.E127", + ) + + def test_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + date_hierarchy = "name" + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'date_hierarchy' must be a DateField or DateTimeField.", + "admin.E128", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + date_hierarchy = "pub_date" + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_related_valid_case(self): + class TestModelAdmin(ModelAdmin): + date_hierarchy = "band__sign_date" + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def 
test_related_invalid_field_type(self): + class TestModelAdmin(ModelAdmin): + date_hierarchy = "band__name" + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'date_hierarchy' must be a DateField or DateTimeField.", + "admin.E128", + ) + + +class OrderingCheckTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + ordering = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'ordering' must be a list or tuple.", + "admin.E031", + ) + + class TestModelAdmin(ModelAdmin): + ordering = ("non_existent_field",) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'ordering[0]' refers to 'non_existent_field', " + "which is not a field of 'modeladmin.ValidationTestModel'.", + "admin.E033", + ) + + def test_random_marker_not_alone(self): + class TestModelAdmin(ModelAdmin): + ordering = ("?", "name") + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'ordering' has the random ordering marker '?', but contains " + "other fields as well.", + "admin.E032", + hint='Either remove the "?", or remove the other fields.', + ) + + def test_valid_random_marker_case(self): + class TestModelAdmin(ModelAdmin): + ordering = ("?",) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_valid_complex_case(self): + class TestModelAdmin(ModelAdmin): + ordering = ("band__name",) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + ordering = ("name", "pk") + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_invalid_expression(self): + class TestModelAdmin(ModelAdmin): + ordering = (F("nonexistent"),) + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'ordering[0]' refers to 'nonexistent', which is not " + "a field of 'modeladmin.ValidationTestModel'.", + "admin.E033", + ) + + def 
test_valid_expression(self): + class TestModelAdmin(ModelAdmin): + ordering = (Upper("name"), Upper("band__name").desc()) + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class ListSelectRelatedCheckTests(CheckTestCase): + def test_invalid_type(self): + class TestModelAdmin(ModelAdmin): + list_select_related = 1 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'list_select_related' must be a boolean, tuple or list.", + "admin.E117", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + list_select_related = False + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class SaveAsCheckTests(CheckTestCase): + def test_not_boolean(self): + class TestModelAdmin(ModelAdmin): + save_as = 1 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'save_as' must be a boolean.", + "admin.E101", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + save_as = True + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class SaveOnTopCheckTests(CheckTestCase): + def test_not_boolean(self): + class TestModelAdmin(ModelAdmin): + save_on_top = 1 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'save_on_top' must be a boolean.", + "admin.E102", + ) + + def test_valid_case(self): + class TestModelAdmin(ModelAdmin): + save_on_top = True + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class InlinesCheckTests(CheckTestCase): + def test_not_iterable(self): + class TestModelAdmin(ModelAdmin): + inlines = 10 + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'inlines' must be a list or tuple.", + "admin.E103", + ) + + def test_not_correct_inline_field(self): + class TestModelAdmin(ModelAdmin): + inlines = [42] + + self.assertIsInvalidRegexp( + TestModelAdmin, + ValidationTestModel, + r"'.*\.TestModelAdmin' must inherit from 'InlineModelAdmin'\.", + "admin.E104", + ) + + def 
test_not_model_admin(self): + class ValidationTestInline: + pass + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalidRegexp( + TestModelAdmin, + ValidationTestModel, + r"'.*\.ValidationTestInline' must inherit from 'InlineModelAdmin'\.", + "admin.E104", + ) + + def test_missing_model_field(self): + class ValidationTestInline(TabularInline): + pass + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalidRegexp( + TestModelAdmin, + ValidationTestModel, + r"'.*\.ValidationTestInline' must have a 'model' attribute\.", + "admin.E105", + ) + + def test_invalid_model_type(self): + class SomethingBad: + pass + + class ValidationTestInline(TabularInline): + model = SomethingBad + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalidRegexp( + TestModelAdmin, + ValidationTestModel, + r"The value of '.*\.ValidationTestInline.model' must be a Model\.", + "admin.E106", + ) + + def test_invalid_model(self): + class ValidationTestInline(TabularInline): + model = "Not a class" + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalidRegexp( + TestModelAdmin, + ValidationTestModel, + r"The value of '.*\.ValidationTestInline.model' must be a Model\.", + "admin.E106", + ) + + def test_invalid_callable(self): + def random_obj(): + pass + + class TestModelAdmin(ModelAdmin): + inlines = [random_obj] + + self.assertIsInvalidRegexp( + TestModelAdmin, + ValidationTestModel, + r"'.*\.random_obj' must inherit from 'InlineModelAdmin'\.", + "admin.E104", + ) + + def test_valid_case(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class FkNameCheckTests(CheckTestCase): + def test_missing_field(self): + class ValidationTestInline(TabularInline): + model = 
ValidationTestInlineModel + fk_name = "non_existent_field" + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "'modeladmin.ValidationTestInlineModel' has no field named " + "'non_existent_field'.", + "admin.E202", + invalid_obj=ValidationTestInline, + ) + + def test_valid_case(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + fk_name = "parent" + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + def test_proxy_model_parent(self): + class Parent(Model): + pass + + class ProxyChild(Parent): + class Meta: + proxy = True + + class ProxyProxyChild(ProxyChild): + class Meta: + proxy = True + + class Related(Model): + proxy_child = ForeignKey(ProxyChild, on_delete=CASCADE) + + class InlineFkName(admin.TabularInline): + model = Related + fk_name = "proxy_child" + + class InlineNoFkName(admin.TabularInline): + model = Related + + class ProxyProxyChildAdminFkName(admin.ModelAdmin): + inlines = [InlineFkName, InlineNoFkName] + + self.assertIsValid(ProxyProxyChildAdminFkName, ProxyProxyChild) + + +class ExtraCheckTests(CheckTestCase): + def test_not_integer(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + extra = "hello" + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'extra' must be an integer.", + "admin.E203", + invalid_obj=ValidationTestInline, + ) + + def test_valid_case(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + extra = 2 + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class MaxNumCheckTests(CheckTestCase): + def test_not_integer(self): + class 
ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + max_num = "hello" + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'max_num' must be an integer.", + "admin.E204", + invalid_obj=ValidationTestInline, + ) + + def test_valid_case(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + max_num = 2 + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class MinNumCheckTests(CheckTestCase): + def test_not_integer(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + min_num = "hello" + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'min_num' must be an integer.", + "admin.E205", + invalid_obj=ValidationTestInline, + ) + + def test_valid_case(self): + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + min_num = 2 + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class FormsetCheckTests(CheckTestCase): + def test_invalid_type(self): + class FakeFormSet: + pass + + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + formset = FakeFormSet + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsInvalid( + TestModelAdmin, + ValidationTestModel, + "The value of 'formset' must inherit from 'BaseModelFormSet'.", + "admin.E206", + invalid_obj=ValidationTestInline, + ) + + def test_inline_without_formset_class(self): + class ValidationTestInlineWithoutFormsetClass(TabularInline): + model = ValidationTestInlineModel + formset = "Not a FormSet Class" + + class TestModelAdminWithoutFormsetClass(ModelAdmin): + inlines = 
[ValidationTestInlineWithoutFormsetClass] + + self.assertIsInvalid( + TestModelAdminWithoutFormsetClass, + ValidationTestModel, + "The value of 'formset' must inherit from 'BaseModelFormSet'.", + "admin.E206", + invalid_obj=ValidationTestInlineWithoutFormsetClass, + ) + + def test_valid_case(self): + class RealModelFormSet(BaseModelFormSet): + pass + + class ValidationTestInline(TabularInline): + model = ValidationTestInlineModel + formset = RealModelFormSet + + class TestModelAdmin(ModelAdmin): + inlines = [ValidationTestInline] + + self.assertIsValid(TestModelAdmin, ValidationTestModel) + + +class ListDisplayEditableTests(CheckTestCase): + def test_list_display_links_is_none(self): + """ + list_display and list_editable can contain the same values + when list_display_links is None + """ + + class ProductAdmin(ModelAdmin): + list_display = ["name", "slug", "pub_date"] + list_editable = list_display + list_display_links = None + + self.assertIsValid(ProductAdmin, ValidationTestModel) + + def test_list_display_first_item_same_as_list_editable_first_item(self): + """ + The first item in list_display can be the same as the first in + list_editable. + """ + + class ProductAdmin(ModelAdmin): + list_display = ["name", "slug", "pub_date"] + list_editable = ["name", "slug"] + list_display_links = ["pub_date"] + + self.assertIsValid(ProductAdmin, ValidationTestModel) + + def test_list_display_first_item_in_list_editable(self): + """ + The first item in list_display can be in list_editable as long as + list_display_links is defined. + """ + + class ProductAdmin(ModelAdmin): + list_display = ["name", "slug", "pub_date"] + list_editable = ["slug", "name"] + list_display_links = ["pub_date"] + + self.assertIsValid(ProductAdmin, ValidationTestModel) + + def test_list_display_first_item_same_as_list_editable_no_list_display_links(self): + """ + The first item in list_display cannot be the same as the first item + in list_editable if list_display_links is not defined. 
+ """ + + class ProductAdmin(ModelAdmin): + list_display = ["name"] + list_editable = ["name"] + + self.assertIsInvalid( + ProductAdmin, + ValidationTestModel, + "The value of 'list_editable[0]' refers to the first field " + "in 'list_display' ('name'), which cannot be used unless " + "'list_display_links' is set.", + id="admin.E124", + ) + + def test_list_display_first_item_in_list_editable_no_list_display_links(self): + """ + The first item in list_display cannot be in list_editable if + list_display_links isn't defined. + """ + + class ProductAdmin(ModelAdmin): + list_display = ["name", "slug", "pub_date"] + list_editable = ["slug", "name"] + + self.assertIsInvalid( + ProductAdmin, + ValidationTestModel, + "The value of 'list_editable[1]' refers to the first field " + "in 'list_display' ('name'), which cannot be used unless " + "'list_display_links' is set.", + id="admin.E124", + ) + + def test_both_list_editable_and_list_display_links(self): + class ProductAdmin(ModelAdmin): + list_editable = ("name",) + list_display = ("name",) + list_display_links = ("name",) + + self.assertIsInvalid( + ProductAdmin, + ValidationTestModel, + "The value of 'name' cannot be in both 'list_editable' and " + "'list_display_links'.", + id="admin.E123", + ) + + +class AutocompleteFieldsTests(CheckTestCase): + def test_autocomplete_e036(self): + class Admin(ModelAdmin): + autocomplete_fields = "name" + + self.assertIsInvalid( + Admin, + Band, + msg="The value of 'autocomplete_fields' must be a list or tuple.", + id="admin.E036", + invalid_obj=Admin, + ) + + def test_autocomplete_e037(self): + class Admin(ModelAdmin): + autocomplete_fields = ("nonexistent",) + + self.assertIsInvalid( + Admin, + ValidationTestModel, + msg=( + "The value of 'autocomplete_fields[0]' refers to 'nonexistent', " + "which is not a field of 'modeladmin.ValidationTestModel'." 
+ ), + id="admin.E037", + invalid_obj=Admin, + ) + + def test_autocomplete_e38(self): + class Admin(ModelAdmin): + autocomplete_fields = ("name",) + + self.assertIsInvalid( + Admin, + ValidationTestModel, + msg=( + "The value of 'autocomplete_fields[0]' must be a foreign " + "key or a many-to-many field." + ), + id="admin.E038", + invalid_obj=Admin, + ) + + def test_autocomplete_e039(self): + class Admin(ModelAdmin): + autocomplete_fields = ("band",) + + self.assertIsInvalid( + Admin, + Song, + msg=( + 'An admin for model "Band" has to be registered ' + "to be referenced by Admin.autocomplete_fields." + ), + id="admin.E039", + invalid_obj=Admin, + ) + + def test_autocomplete_e040(self): + class NoSearchFieldsAdmin(ModelAdmin): + pass + + class AutocompleteAdmin(ModelAdmin): + autocomplete_fields = ("featuring",) + + site = AdminSite() + site.register(Band, NoSearchFieldsAdmin) + self.assertIsInvalid( + AutocompleteAdmin, + Song, + msg=( + 'NoSearchFieldsAdmin must define "search_fields", because ' + "it's referenced by AutocompleteAdmin.autocomplete_fields." 
+ ), + id="admin.E040", + invalid_obj=AutocompleteAdmin, + admin_site=site, + ) + + def test_autocomplete_is_valid(self): + class SearchFieldsAdmin(ModelAdmin): + search_fields = "name" + + class AutocompleteAdmin(ModelAdmin): + autocomplete_fields = ("featuring",) + + site = AdminSite() + site.register(Band, SearchFieldsAdmin) + self.assertIsValid(AutocompleteAdmin, Song, admin_site=site) + + def test_autocomplete_is_onetoone(self): + class UserAdmin(ModelAdmin): + search_fields = ("name",) + + class Admin(ModelAdmin): + autocomplete_fields = ("best_friend",) + + site = AdminSite() + site.register(User, UserAdmin) + self.assertIsValid(Admin, ValidationTestModel, admin_site=site) + + +class ActionsCheckTests(CheckTestCase): + def test_custom_permissions_require_matching_has_method(self): + @admin.action(permissions=["custom"]) + def custom_permission_action(modeladmin, request, queryset): + pass + + class BandAdmin(ModelAdmin): + actions = (custom_permission_action,) + + self.assertIsInvalid( + BandAdmin, + Band, + "BandAdmin must define a has_custom_permission() method for the " + "custom_permission_action action.", + id="admin.E129", + ) + + def test_actions_not_unique(self): + @admin.action + def action(modeladmin, request, queryset): + pass + + class BandAdmin(ModelAdmin): + actions = (action, action) + + self.assertIsInvalid( + BandAdmin, + Band, + "__name__ attributes of actions defined in BandAdmin must be " + "unique. 
Name 'action' is not unique.", + id="admin.E130", + ) + + def test_actions_unique(self): + @admin.action + def action1(modeladmin, request, queryset): + pass + + @admin.action + def action2(modeladmin, request, queryset): + pass + + class BandAdmin(ModelAdmin): + actions = (action1, action2) + + self.assertIsValid(BandAdmin, Band) diff --git a/testbed/django__django/tests/modeladmin/tests.py b/testbed/django__django/tests/modeladmin/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..f2763ecc0fb3296ba9edc303f8b5027ee4bb6702 --- /dev/null +++ b/testbed/django__django/tests/modeladmin/tests.py @@ -0,0 +1,1053 @@ +from datetime import date + +from django import forms +from django.contrib.admin.models import ADDITION, CHANGE, DELETION, LogEntry +from django.contrib.admin.options import ( + HORIZONTAL, + VERTICAL, + ModelAdmin, + TabularInline, + get_content_type_for_model, +) +from django.contrib.admin.sites import AdminSite +from django.contrib.admin.widgets import ( + AdminDateWidget, + AdminRadioSelect, + AutocompleteSelect, + AutocompleteSelectMultiple, +) +from django.contrib.auth.models import User +from django.db import models +from django.forms.widgets import Select +from django.test import RequestFactory, SimpleTestCase, TestCase +from django.test.utils import isolate_apps +from django.utils.deprecation import RemovedInDjango60Warning + +from .models import Band, Concert, Song + + +class MockRequest: + pass + + +class MockSuperUser: + def has_perm(self, perm, obj=None): + return True + + +request = MockRequest() +request.user = MockSuperUser() + + +class ModelAdminTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.band = Band.objects.create( + name="The Doors", + bio="", + sign_date=date(1965, 1, 1), + ) + + def setUp(self): + self.site = AdminSite() + + def test_modeladmin_str(self): + ma = ModelAdmin(Band, self.site) + self.assertEqual(str(ma), "modeladmin.ModelAdmin") + + def test_default_attributes(self): + ma = 
ModelAdmin(Band, self.site) + self.assertEqual(ma.actions, ()) + self.assertEqual(ma.inlines, ()) + + # form/fields/fieldsets interaction ############################## + + def test_default_fields(self): + ma = ModelAdmin(Band, self.site) + self.assertEqual( + list(ma.get_form(request).base_fields), ["name", "bio", "sign_date"] + ) + self.assertEqual(list(ma.get_fields(request)), ["name", "bio", "sign_date"]) + self.assertEqual( + list(ma.get_fields(request, self.band)), ["name", "bio", "sign_date"] + ) + self.assertIsNone(ma.get_exclude(request, self.band)) + + def test_default_fieldsets(self): + # fieldsets_add and fieldsets_change should return a special data structure that + # is used in the templates. They should generate the "right thing" whether we + # have specified a custom form, the fields argument, or nothing at all. + # + # Here's the default case. There are no custom form_add/form_change methods, + # no fields argument, and no fieldsets argument. + ma = ModelAdmin(Band, self.site) + self.assertEqual( + ma.get_fieldsets(request), + [(None, {"fields": ["name", "bio", "sign_date"]})], + ) + self.assertEqual( + ma.get_fieldsets(request, self.band), + [(None, {"fields": ["name", "bio", "sign_date"]})], + ) + + def test_get_fieldsets(self): + # get_fieldsets() is called when figuring out form fields (#18681). 
+ class BandAdmin(ModelAdmin): + def get_fieldsets(self, request, obj=None): + return [(None, {"fields": ["name", "bio"]})] + + ma = BandAdmin(Band, self.site) + form = ma.get_form(None) + self.assertEqual(form._meta.fields, ["name", "bio"]) + + class InlineBandAdmin(TabularInline): + model = Concert + fk_name = "main_band" + can_delete = False + + def get_fieldsets(self, request, obj=None): + return [(None, {"fields": ["day", "transport"]})] + + ma = InlineBandAdmin(Band, self.site) + form = ma.get_formset(None).form + self.assertEqual(form._meta.fields, ["day", "transport"]) + + def test_lookup_allowed_allows_nonexistent_lookup(self): + """ + A lookup_allowed allows a parameter whose field lookup doesn't exist. + (#21129). + """ + + class BandAdmin(ModelAdmin): + fields = ["name"] + + ma = BandAdmin(Band, self.site) + self.assertIs( + ma.lookup_allowed("name__nonexistent", "test_value", request), + True, + ) + + @isolate_apps("modeladmin") + def test_lookup_allowed_onetoone(self): + class Department(models.Model): + code = models.CharField(max_length=4, unique=True) + + class Employee(models.Model): + department = models.ForeignKey(Department, models.CASCADE, to_field="code") + + class EmployeeProfile(models.Model): + employee = models.OneToOneField(Employee, models.CASCADE) + + class EmployeeInfo(models.Model): + employee = models.OneToOneField(Employee, models.CASCADE) + description = models.CharField(max_length=100) + + class EmployeeProfileAdmin(ModelAdmin): + list_filter = [ + "employee__employeeinfo__description", + "employee__department__code", + ] + + ma = EmployeeProfileAdmin(EmployeeProfile, self.site) + # Reverse OneToOneField + self.assertIs( + ma.lookup_allowed( + "employee__employeeinfo__description", "test_value", request + ), + True, + ) + # OneToOneField and ForeignKey + self.assertIs( + ma.lookup_allowed("employee__department__code", "test_value", request), + True, + ) + + @isolate_apps("modeladmin") + def 
test_lookup_allowed_foreign_primary(self): + class Country(models.Model): + name = models.CharField(max_length=256) + + class Place(models.Model): + country = models.ForeignKey(Country, models.CASCADE) + + class Restaurant(models.Model): + place = models.OneToOneField(Place, models.CASCADE, primary_key=True) + + class Waiter(models.Model): + restaurant = models.ForeignKey(Restaurant, models.CASCADE) + + class WaiterAdmin(ModelAdmin): + list_filter = [ + "restaurant__place__country", + "restaurant__place__country__name", + ] + + ma = WaiterAdmin(Waiter, self.site) + self.assertIs( + ma.lookup_allowed("restaurant__place__country", "1", request), + True, + ) + self.assertIs( + ma.lookup_allowed("restaurant__place__country__id__exact", "1", request), + True, + ) + self.assertIs( + ma.lookup_allowed( + "restaurant__place__country__name", "test_value", request + ), + True, + ) + + def test_lookup_allowed_considers_dynamic_list_filter(self): + class ConcertAdmin(ModelAdmin): + list_filter = ["main_band__sign_date"] + + def get_list_filter(self, request): + if getattr(request, "user", None): + return self.list_filter + ["main_band__name"] + return self.list_filter + + model_admin = ConcertAdmin(Concert, self.site) + request_band_name_filter = RequestFactory().get( + "/", {"main_band__name": "test"} + ) + self.assertIs( + model_admin.lookup_allowed( + "main_band__sign_date", "?", request_band_name_filter + ), + True, + ) + self.assertIs( + model_admin.lookup_allowed( + "main_band__name", "?", request_band_name_filter + ), + False, + ) + request_with_superuser = request + self.assertIs( + model_admin.lookup_allowed( + "main_band__sign_date", "?", request_with_superuser + ), + True, + ) + self.assertIs( + model_admin.lookup_allowed("main_band__name", "?", request_with_superuser), + True, + ) + + def test_lookup_allowed_without_request_deprecation(self): + class ConcertAdmin(ModelAdmin): + list_filter = ["main_band__sign_date"] + + def get_list_filter(self, request): + return 
self.list_filter + ["main_band__name"] + + def lookup_allowed(self, lookup, value): + return True + + model_admin = ConcertAdmin(Concert, self.site) + msg = ( + "`request` must be added to the signature of ModelAdminTests." + "test_lookup_allowed_without_request_deprecation.." + "ConcertAdmin.lookup_allowed()." + ) + request_band_name_filter = RequestFactory().get( + "/", {"main_band__name": "test"} + ) + request_band_name_filter.user = User.objects.create_superuser( + username="bob", email="bob@test.com", password="test" + ) + with self.assertWarnsMessage(RemovedInDjango60Warning, msg): + changelist = model_admin.get_changelist_instance(request_band_name_filter) + filterspec = changelist.get_filters(request_band_name_filter)[0][0] + self.assertEqual(filterspec.title, "sign date") + filterspec = changelist.get_filters(request_band_name_filter)[0][1] + self.assertEqual(filterspec.title, "name") + self.assertSequenceEqual(filterspec.lookup_choices, [self.band.name]) + + def test_field_arguments(self): + # If fields is specified, fieldsets_add and fieldsets_change should + # just stick the fields into a formsets structure and return it. + class BandAdmin(ModelAdmin): + fields = ["name"] + + ma = BandAdmin(Band, self.site) + + self.assertEqual(list(ma.get_fields(request)), ["name"]) + self.assertEqual(list(ma.get_fields(request, self.band)), ["name"]) + self.assertEqual(ma.get_fieldsets(request), [(None, {"fields": ["name"]})]) + self.assertEqual( + ma.get_fieldsets(request, self.band), [(None, {"fields": ["name"]})] + ) + + def test_field_arguments_restricted_on_form(self): + # If fields or fieldsets is specified, it should exclude fields on the + # Form class to the fields specified. This may cause errors to be + # raised in the db layer if required model fields aren't in fields/ + # fieldsets, but that's preferable to ghost errors where a field in the + # Form class isn't being displayed because it's not in fields/fieldsets. + + # Using `fields`. 
+ class BandAdmin(ModelAdmin): + fields = ["name"] + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["name"]) + self.assertEqual(list(ma.get_form(request, self.band).base_fields), ["name"]) + + # Using `fieldsets`. + class BandAdmin(ModelAdmin): + fieldsets = [(None, {"fields": ["name"]})] + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["name"]) + self.assertEqual(list(ma.get_form(request, self.band).base_fields), ["name"]) + + # Using `exclude`. + class BandAdmin(ModelAdmin): + exclude = ["bio"] + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["name", "sign_date"]) + + # You can also pass a tuple to `exclude`. + class BandAdmin(ModelAdmin): + exclude = ("bio",) + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["name", "sign_date"]) + + # Using `fields` and `exclude`. + class BandAdmin(ModelAdmin): + fields = ["name", "bio"] + exclude = ["bio"] + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["name"]) + + def test_custom_form_meta_exclude_with_readonly(self): + """ + The custom ModelForm's `Meta.exclude` is respected when used in + conjunction with `ModelAdmin.readonly_fields` and when no + `ModelAdmin.exclude` is defined (#14496). 
+ """ + + # With ModelAdmin + class AdminBandForm(forms.ModelForm): + class Meta: + model = Band + exclude = ["bio"] + + class BandAdmin(ModelAdmin): + readonly_fields = ["name"] + form = AdminBandForm + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["sign_date"]) + + # With InlineModelAdmin + class AdminConcertForm(forms.ModelForm): + class Meta: + model = Concert + exclude = ["day"] + + class ConcertInline(TabularInline): + readonly_fields = ["transport"] + form = AdminConcertForm + fk_name = "main_band" + model = Concert + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, self.site) + self.assertEqual( + list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), + ["main_band", "opening_band", "id", "DELETE"], + ) + + def test_custom_formfield_override_readonly(self): + class AdminBandForm(forms.ModelForm): + name = forms.CharField() + + class Meta: + exclude = () + model = Band + + class BandAdmin(ModelAdmin): + form = AdminBandForm + readonly_fields = ["name"] + + ma = BandAdmin(Band, self.site) + + # `name` shouldn't appear in base_fields because it's part of + # readonly_fields. + self.assertEqual(list(ma.get_form(request).base_fields), ["bio", "sign_date"]) + # But it should appear in get_fields()/fieldsets() so it can be + # displayed as read-only. + self.assertEqual(list(ma.get_fields(request)), ["bio", "sign_date", "name"]) + self.assertEqual( + list(ma.get_fieldsets(request)), + [(None, {"fields": ["bio", "sign_date", "name"]})], + ) + + def test_custom_form_meta_exclude(self): + """ + The custom ModelForm's `Meta.exclude` is overridden if + `ModelAdmin.exclude` or `InlineModelAdmin.exclude` are defined (#14496). 
+ """ + + # With ModelAdmin + class AdminBandForm(forms.ModelForm): + class Meta: + model = Band + exclude = ["bio"] + + class BandAdmin(ModelAdmin): + exclude = ["name"] + form = AdminBandForm + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["bio", "sign_date"]) + + # With InlineModelAdmin + class AdminConcertForm(forms.ModelForm): + class Meta: + model = Concert + exclude = ["day"] + + class ConcertInline(TabularInline): + exclude = ["transport"] + form = AdminConcertForm + fk_name = "main_band" + model = Concert + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, self.site) + self.assertEqual( + list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), + ["main_band", "opening_band", "day", "id", "DELETE"], + ) + + def test_overriding_get_exclude(self): + class BandAdmin(ModelAdmin): + def get_exclude(self, request, obj=None): + return ["name"] + + self.assertEqual( + list(BandAdmin(Band, self.site).get_form(request).base_fields), + ["bio", "sign_date"], + ) + + def test_get_exclude_overrides_exclude(self): + class BandAdmin(ModelAdmin): + exclude = ["bio"] + + def get_exclude(self, request, obj=None): + return ["name"] + + self.assertEqual( + list(BandAdmin(Band, self.site).get_form(request).base_fields), + ["bio", "sign_date"], + ) + + def test_get_exclude_takes_obj(self): + class BandAdmin(ModelAdmin): + def get_exclude(self, request, obj=None): + if obj: + return ["sign_date"] + return ["name"] + + self.assertEqual( + list(BandAdmin(Band, self.site).get_form(request, self.band).base_fields), + ["name", "bio"], + ) + + def test_custom_form_validation(self): + # If a form is specified, it should use it allowing custom validation + # to work properly. This won't break any of the admin widgets or media. 
+ class AdminBandForm(forms.ModelForm): + delete = forms.BooleanField() + + class BandAdmin(ModelAdmin): + form = AdminBandForm + + ma = BandAdmin(Band, self.site) + self.assertEqual( + list(ma.get_form(request).base_fields), + ["name", "bio", "sign_date", "delete"], + ) + self.assertEqual( + type(ma.get_form(request).base_fields["sign_date"].widget), AdminDateWidget + ) + + def test_form_exclude_kwarg_override(self): + """ + The `exclude` kwarg passed to `ModelAdmin.get_form()` overrides all + other declarations (#8999). + """ + + class AdminBandForm(forms.ModelForm): + class Meta: + model = Band + exclude = ["name"] + + class BandAdmin(ModelAdmin): + exclude = ["sign_date"] + form = AdminBandForm + + def get_form(self, request, obj=None, **kwargs): + kwargs["exclude"] = ["bio"] + return super().get_form(request, obj, **kwargs) + + ma = BandAdmin(Band, self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["name", "sign_date"]) + + def test_formset_exclude_kwarg_override(self): + """ + The `exclude` kwarg passed to `InlineModelAdmin.get_formset()` + overrides all other declarations (#8999). 
+ """ + + class AdminConcertForm(forms.ModelForm): + class Meta: + model = Concert + exclude = ["day"] + + class ConcertInline(TabularInline): + exclude = ["transport"] + form = AdminConcertForm + fk_name = "main_band" + model = Concert + + def get_formset(self, request, obj=None, **kwargs): + kwargs["exclude"] = ["opening_band"] + return super().get_formset(request, obj, **kwargs) + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, self.site) + self.assertEqual( + list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), + ["main_band", "day", "transport", "id", "DELETE"], + ) + + def test_formset_overriding_get_exclude_with_form_fields(self): + class AdminConcertForm(forms.ModelForm): + class Meta: + model = Concert + fields = ["main_band", "opening_band", "day", "transport"] + + class ConcertInline(TabularInline): + form = AdminConcertForm + fk_name = "main_band" + model = Concert + + def get_exclude(self, request, obj=None): + return ["opening_band"] + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, self.site) + self.assertEqual( + list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), + ["main_band", "day", "transport", "id", "DELETE"], + ) + + def test_formset_overriding_get_exclude_with_form_exclude(self): + class AdminConcertForm(forms.ModelForm): + class Meta: + model = Concert + exclude = ["day"] + + class ConcertInline(TabularInline): + form = AdminConcertForm + fk_name = "main_band" + model = Concert + + def get_exclude(self, request, obj=None): + return ["opening_band"] + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, self.site) + self.assertEqual( + list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), + ["main_band", "day", "transport", "id", "DELETE"], + ) + + def test_raw_id_fields_widget_override(self): + """ + The autocomplete_fields, raw_id_fields, and radio_fields widgets may + 
overridden by specifying a widget in get_formset(). + """ + + class ConcertInline(TabularInline): + model = Concert + fk_name = "main_band" + raw_id_fields = ("opening_band",) + + def get_formset(self, request, obj=None, **kwargs): + kwargs["widgets"] = {"opening_band": Select} + return super().get_formset(request, obj, **kwargs) + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, self.site) + band_widget = ( + list(ma.get_formsets_with_inlines(request))[0][0]() + .forms[0] + .fields["opening_band"] + .widget + ) + # Without the override this would be ForeignKeyRawIdWidget. + self.assertIsInstance(band_widget, Select) + + def test_queryset_override(self): + # If the queryset of a ModelChoiceField in a custom form is overridden, + # RelatedFieldWidgetWrapper doesn't mess that up. + band2 = Band.objects.create( + name="The Beatles", bio="", sign_date=date(1962, 1, 1) + ) + + ma = ModelAdmin(Concert, self.site) + form = ma.get_form(request)() + + self.assertHTMLEqual( + str(form["main_band"]), + '" % (band2.id, self.band.id), + ) + + class AdminConcertForm(forms.ModelForm): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields["main_band"].queryset = Band.objects.filter( + name="The Doors" + ) + + class ConcertAdminWithForm(ModelAdmin): + form = AdminConcertForm + + ma = ConcertAdminWithForm(Concert, self.site) + form = ma.get_form(request)() + + self.assertHTMLEqual( + str(form["main_band"]), + '" % self.band.id, + ) + + def test_regression_for_ticket_15820(self): + """ + `obj` is passed from `InlineModelAdmin.get_fieldsets()` to + `InlineModelAdmin.get_formset()`. 
+ """ + + class CustomConcertForm(forms.ModelForm): + class Meta: + model = Concert + fields = ["day"] + + class ConcertInline(TabularInline): + model = Concert + fk_name = "main_band" + + def get_formset(self, request, obj=None, **kwargs): + if obj: + kwargs["form"] = CustomConcertForm + return super().get_formset(request, obj, **kwargs) + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + Concert.objects.create(main_band=self.band, opening_band=self.band, day=1) + ma = BandAdmin(Band, self.site) + inline_instances = ma.get_inline_instances(request) + fieldsets = list(inline_instances[0].get_fieldsets(request)) + self.assertEqual( + fieldsets[0][1]["fields"], ["main_band", "opening_band", "day", "transport"] + ) + fieldsets = list( + inline_instances[0].get_fieldsets(request, inline_instances[0].model) + ) + self.assertEqual(fieldsets[0][1]["fields"], ["day"]) + + # radio_fields behavior ########################################### + + def test_default_foreign_key_widget(self): + # First, without any radio_fields specified, the widgets for ForeignKey + # and fields with choices specified ought to be a basic Select widget. + # ForeignKey widgets in the admin are wrapped with RelatedFieldWidgetWrapper so + # they need to be handled properly when type checking. For Select fields, all of + # the choices lists have a first entry of dashes. 
+ cma = ModelAdmin(Concert, self.site) + cmafa = cma.get_form(request) + + self.assertEqual(type(cmafa.base_fields["main_band"].widget.widget), Select) + self.assertEqual( + list(cmafa.base_fields["main_band"].widget.choices), + [("", "---------"), (self.band.id, "The Doors")], + ) + + self.assertEqual(type(cmafa.base_fields["opening_band"].widget.widget), Select) + self.assertEqual( + list(cmafa.base_fields["opening_band"].widget.choices), + [("", "---------"), (self.band.id, "The Doors")], + ) + self.assertEqual(type(cmafa.base_fields["day"].widget), Select) + self.assertEqual( + list(cmafa.base_fields["day"].widget.choices), + [("", "---------"), (1, "Fri"), (2, "Sat")], + ) + self.assertEqual(type(cmafa.base_fields["transport"].widget), Select) + self.assertEqual( + list(cmafa.base_fields["transport"].widget.choices), + [("", "---------"), (1, "Plane"), (2, "Train"), (3, "Bus")], + ) + + def test_foreign_key_as_radio_field(self): + # Now specify all the fields as radio_fields. Widgets should now be + # RadioSelect, and the choices list should have a first entry of 'None' if + # blank=True for the model field. Finally, the widget should have the + # 'radiolist' attr, and 'inline' as well if the field is specified HORIZONTAL. 
+ class ConcertAdmin(ModelAdmin): + radio_fields = { + "main_band": HORIZONTAL, + "opening_band": VERTICAL, + "day": VERTICAL, + "transport": HORIZONTAL, + } + + cma = ConcertAdmin(Concert, self.site) + cmafa = cma.get_form(request) + + self.assertEqual( + type(cmafa.base_fields["main_band"].widget.widget), AdminRadioSelect + ) + self.assertEqual( + cmafa.base_fields["main_band"].widget.attrs, {"class": "radiolist inline"} + ) + self.assertEqual( + list(cmafa.base_fields["main_band"].widget.choices), + [(self.band.id, "The Doors")], + ) + + self.assertEqual( + type(cmafa.base_fields["opening_band"].widget.widget), AdminRadioSelect + ) + self.assertEqual( + cmafa.base_fields["opening_band"].widget.attrs, {"class": "radiolist"} + ) + self.assertEqual( + list(cmafa.base_fields["opening_band"].widget.choices), + [("", "None"), (self.band.id, "The Doors")], + ) + self.assertEqual(type(cmafa.base_fields["day"].widget), AdminRadioSelect) + self.assertEqual(cmafa.base_fields["day"].widget.attrs, {"class": "radiolist"}) + self.assertEqual( + list(cmafa.base_fields["day"].widget.choices), [(1, "Fri"), (2, "Sat")] + ) + + self.assertEqual(type(cmafa.base_fields["transport"].widget), AdminRadioSelect) + self.assertEqual( + cmafa.base_fields["transport"].widget.attrs, {"class": "radiolist inline"} + ) + self.assertEqual( + list(cmafa.base_fields["transport"].widget.choices), + [("", "None"), (1, "Plane"), (2, "Train"), (3, "Bus")], + ) + + class AdminConcertForm(forms.ModelForm): + class Meta: + model = Concert + exclude = ("transport",) + + class ConcertAdmin(ModelAdmin): + form = AdminConcertForm + + ma = ConcertAdmin(Concert, self.site) + self.assertEqual( + list(ma.get_form(request).base_fields), ["main_band", "opening_band", "day"] + ) + + class AdminConcertForm(forms.ModelForm): + extra = forms.CharField() + + class Meta: + model = Concert + fields = ["extra", "transport"] + + class ConcertAdmin(ModelAdmin): + form = AdminConcertForm + + ma = ConcertAdmin(Concert, 
self.site) + self.assertEqual(list(ma.get_form(request).base_fields), ["extra", "transport"]) + + class ConcertInline(TabularInline): + form = AdminConcertForm + model = Concert + fk_name = "main_band" + can_delete = True + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, self.site) + self.assertEqual( + list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), + ["extra", "transport", "id", "DELETE", "main_band"], + ) + + def test_log_actions(self): + ma = ModelAdmin(Band, self.site) + mock_request = MockRequest() + mock_request.user = User.objects.create(username="bill") + content_type = get_content_type_for_model(self.band) + tests = ( + (ma.log_addition, ADDITION, {"added": {}}), + (ma.log_change, CHANGE, {"changed": {"fields": ["name", "bio"]}}), + (ma.log_deletion, DELETION, str(self.band)), + ) + for method, flag, message in tests: + with self.subTest(name=method.__name__): + created = method(mock_request, self.band, message) + fetched = LogEntry.objects.filter(action_flag=flag).latest("id") + self.assertEqual(created, fetched) + self.assertEqual(fetched.action_flag, flag) + self.assertEqual(fetched.content_type, content_type) + self.assertEqual(fetched.object_id, str(self.band.pk)) + self.assertEqual(fetched.user, mock_request.user) + if flag == DELETION: + self.assertEqual(fetched.change_message, "") + self.assertEqual(fetched.object_repr, message) + else: + self.assertEqual(fetched.change_message, str(message)) + self.assertEqual(fetched.object_repr, str(self.band)) + + def test_get_autocomplete_fields(self): + class NameAdmin(ModelAdmin): + search_fields = ["name"] + + class SongAdmin(ModelAdmin): + autocomplete_fields = ["featuring"] + fields = ["featuring", "band"] + + class OtherSongAdmin(SongAdmin): + def get_autocomplete_fields(self, request): + return ["band"] + + self.site.register(Band, NameAdmin) + try: + # Uses autocomplete_fields if not overridden. 
+ model_admin = SongAdmin(Song, self.site) + form = model_admin.get_form(request)() + self.assertIsInstance( + form.fields["featuring"].widget.widget, AutocompleteSelectMultiple + ) + # Uses overridden get_autocomplete_fields + model_admin = OtherSongAdmin(Song, self.site) + form = model_admin.get_form(request)() + self.assertIsInstance(form.fields["band"].widget.widget, AutocompleteSelect) + finally: + self.site.unregister(Band) + + def test_get_deleted_objects(self): + mock_request = MockRequest() + mock_request.user = User.objects.create_superuser( + username="bob", email="bob@test.com", password="test" + ) + self.site.register(Band, ModelAdmin) + ma = self.site.get_model_admin(Band) + ( + deletable_objects, + model_count, + perms_needed, + protected, + ) = ma.get_deleted_objects([self.band], request) + self.assertEqual(deletable_objects, ["Band: The Doors"]) + self.assertEqual(model_count, {"bands": 1}) + self.assertEqual(perms_needed, set()) + self.assertEqual(protected, []) + + def test_get_deleted_objects_with_custom_has_delete_permission(self): + """ + ModelAdmin.get_deleted_objects() uses ModelAdmin.has_delete_permission() + for permissions checking. 
+ """ + mock_request = MockRequest() + mock_request.user = User.objects.create_superuser( + username="bob", email="bob@test.com", password="test" + ) + + class TestModelAdmin(ModelAdmin): + def has_delete_permission(self, request, obj=None): + return False + + self.site.register(Band, TestModelAdmin) + ma = self.site.get_model_admin(Band) + ( + deletable_objects, + model_count, + perms_needed, + protected, + ) = ma.get_deleted_objects([self.band], request) + self.assertEqual(deletable_objects, ["Band: The Doors"]) + self.assertEqual(model_count, {"bands": 1}) + self.assertEqual(perms_needed, {"band"}) + self.assertEqual(protected, []) + + def test_modeladmin_repr(self): + ma = ModelAdmin(Band, self.site) + self.assertEqual( + repr(ma), + "<ModelAdmin: model=Band site=AdminSite(name='admin')>", + ) + + +class ModelAdminPermissionTests(SimpleTestCase): + class MockUser: + def has_module_perms(self, app_label): + return app_label == "modeladmin" + + class MockViewUser(MockUser): + def has_perm(self, perm, obj=None): + return perm == "modeladmin.view_band" + + class MockAddUser(MockUser): + def has_perm(self, perm, obj=None): + return perm == "modeladmin.add_band" + + class MockChangeUser(MockUser): + def has_perm(self, perm, obj=None): + return perm == "modeladmin.change_band" + + class MockDeleteUser(MockUser): + def has_perm(self, perm, obj=None): + return perm == "modeladmin.delete_band" + + def test_has_view_permission(self): + """ + has_view_permission() returns True for users who can view objects and + False for users who can't. 
+ """ + ma = ModelAdmin(Band, AdminSite()) + request = MockRequest() + request.user = self.MockViewUser() + self.assertIs(ma.has_view_permission(request), True) + request.user = self.MockAddUser() + self.assertIs(ma.has_view_permission(request), False) + request.user = self.MockChangeUser() + self.assertIs(ma.has_view_permission(request), True) + request.user = self.MockDeleteUser() + self.assertIs(ma.has_view_permission(request), False) + + def test_has_add_permission(self): + """ + has_add_permission returns True for users who can add objects and + False for users who can't. + """ + ma = ModelAdmin(Band, AdminSite()) + request = MockRequest() + request.user = self.MockViewUser() + self.assertFalse(ma.has_add_permission(request)) + request.user = self.MockAddUser() + self.assertTrue(ma.has_add_permission(request)) + request.user = self.MockChangeUser() + self.assertFalse(ma.has_add_permission(request)) + request.user = self.MockDeleteUser() + self.assertFalse(ma.has_add_permission(request)) + + def test_inline_has_add_permission_uses_obj(self): + class ConcertInline(TabularInline): + model = Concert + + def has_add_permission(self, request, obj): + return bool(obj) + + class BandAdmin(ModelAdmin): + inlines = [ConcertInline] + + ma = BandAdmin(Band, AdminSite()) + request = MockRequest() + request.user = self.MockAddUser() + self.assertEqual(ma.get_inline_instances(request), []) + band = Band(name="The Doors", bio="", sign_date=date(1965, 1, 1)) + inline_instances = ma.get_inline_instances(request, band) + self.assertEqual(len(inline_instances), 1) + self.assertIsInstance(inline_instances[0], ConcertInline) + + def test_has_change_permission(self): + """ + has_change_permission returns True for users who can edit objects and + False for users who can't. 
+ """ + ma = ModelAdmin(Band, AdminSite()) + request = MockRequest() + request.user = self.MockViewUser() + self.assertIs(ma.has_change_permission(request), False) + request.user = self.MockAddUser() + self.assertFalse(ma.has_change_permission(request)) + request.user = self.MockChangeUser() + self.assertTrue(ma.has_change_permission(request)) + request.user = self.MockDeleteUser() + self.assertFalse(ma.has_change_permission(request)) + + def test_has_delete_permission(self): + """ + has_delete_permission returns True for users who can delete objects and + False for users who can't. + """ + ma = ModelAdmin(Band, AdminSite()) + request = MockRequest() + request.user = self.MockViewUser() + self.assertIs(ma.has_delete_permission(request), False) + request.user = self.MockAddUser() + self.assertFalse(ma.has_delete_permission(request)) + request.user = self.MockChangeUser() + self.assertFalse(ma.has_delete_permission(request)) + request.user = self.MockDeleteUser() + self.assertTrue(ma.has_delete_permission(request)) + + def test_has_module_permission(self): + """ + has_module_permission returns True for users who have any permission + for the module and False for users who don't. 
+ """ + ma = ModelAdmin(Band, AdminSite()) + request = MockRequest() + request.user = self.MockViewUser() + self.assertIs(ma.has_module_permission(request), True) + request.user = self.MockAddUser() + self.assertTrue(ma.has_module_permission(request)) + request.user = self.MockChangeUser() + self.assertTrue(ma.has_module_permission(request)) + request.user = self.MockDeleteUser() + self.assertTrue(ma.has_module_permission(request)) + + original_app_label = ma.opts.app_label + ma.opts.app_label = "anotherapp" + try: + request.user = self.MockViewUser() + self.assertIs(ma.has_module_permission(request), False) + request.user = self.MockAddUser() + self.assertFalse(ma.has_module_permission(request)) + request.user = self.MockChangeUser() + self.assertFalse(ma.has_module_permission(request)) + request.user = self.MockDeleteUser() + self.assertFalse(ma.has_module_permission(request)) + finally: + ma.opts.app_label = original_app_label diff --git a/testbed/django__django/tests/multiple_database/__init__.py b/testbed/django__django/tests/multiple_database/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/multiple_database/fixtures/multidb-common.json b/testbed/django__django/tests/multiple_database/fixtures/multidb-common.json new file mode 100644 index 0000000000000000000000000000000000000000..33134173b94642ea4a67f9e5e900f0d31be42096 --- /dev/null +++ b/testbed/django__django/tests/multiple_database/fixtures/multidb-common.json @@ -0,0 +1,10 @@ +[ + { + "pk": 1, + "model": "multiple_database.book", + "fields": { + "title": "The Definitive Guide to Django", + "published": "2009-7-8" + } + } +] \ No newline at end of file diff --git a/testbed/django__django/tests/multiple_database/fixtures/multidb.default.json b/testbed/django__django/tests/multiple_database/fixtures/multidb.default.json new file mode 100644 index 
0000000000000000000000000000000000000000..379b18a803ad6c4a898f5bda8495d634a9aed036 --- /dev/null +++ b/testbed/django__django/tests/multiple_database/fixtures/multidb.default.json @@ -0,0 +1,26 @@ +[ + { + "pk": 1, + "model": "multiple_database.person", + "fields": { + "name": "Marty Alchin" + } + }, + { + "pk": 2, + "model": "multiple_database.person", + "fields": { + "name": "George Vilches" + } + }, + { + "pk": 2, + "model": "multiple_database.book", + "fields": { + "title": "Pro Django", + "published": "2008-12-16", + "authors": [["Marty Alchin"]], + "editor": ["George Vilches"] + } + } +] diff --git a/testbed/django__django/tests/multiple_database/fixtures/multidb.other.json b/testbed/django__django/tests/multiple_database/fixtures/multidb.other.json new file mode 100644 index 0000000000000000000000000000000000000000..c64f4902015e75e26e4bf9d60d5b17a3964b8289 --- /dev/null +++ b/testbed/django__django/tests/multiple_database/fixtures/multidb.other.json @@ -0,0 +1,26 @@ +[ + { + "pk": 1, + "model": "multiple_database.person", + "fields": { + "name": "Mark Pilgrim" + } + }, + { + "pk": 2, + "model": "multiple_database.person", + "fields": { + "name": "Chris Mills" + } + }, + { + "pk": 2, + "model": "multiple_database.book", + "fields": { + "title": "Dive into Python", + "published": "2009-5-4", + "authors": [["Mark Pilgrim"]], + "editor": ["Chris Mills"] + } + } +] \ No newline at end of file diff --git a/testbed/django__django/tests/multiple_database/fixtures/pets.json b/testbed/django__django/tests/multiple_database/fixtures/pets.json new file mode 100644 index 0000000000000000000000000000000000000000..89756a3e5bc648f9dd5db7059b6c2e5a73cb5551 --- /dev/null +++ b/testbed/django__django/tests/multiple_database/fixtures/pets.json @@ -0,0 +1,18 @@ +[ + { + "pk": 1, + "model": "multiple_database.pet", + "fields": { + "name": "Mr Bigglesworth", + "owner": 1 + } + }, + { + "pk": 2, + "model": "multiple_database.pet", + "fields": { + "name": "Spot", + "owner": 2 + } + 
} +] \ No newline at end of file diff --git a/testbed/django__django/tests/multiple_database/models.py b/testbed/django__django/tests/multiple_database/models.py new file mode 100644 index 0000000000000000000000000000000000000000..7de784e149109b60e3e54baf878169406221bbc6 --- /dev/null +++ b/testbed/django__django/tests/multiple_database/models.py @@ -0,0 +1,81 @@ +from django.contrib.auth.models import User +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation +from django.contrib.contenttypes.models import ContentType +from django.db import models + + +class Review(models.Model): + source = models.CharField(max_length=100) + content_type = models.ForeignKey(ContentType, models.CASCADE) + object_id = models.PositiveIntegerField() + content_object = GenericForeignKey() + + class Meta: + ordering = ("source",) + + def __str__(self): + return self.source + + +class PersonManager(models.Manager): + def get_by_natural_key(self, name): + return self.get(name=name) + + +class Person(models.Model): + name = models.CharField(max_length=100, unique=True) + + objects = PersonManager() + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + +# This book manager doesn't do anything interesting; it just +# exists to strip out the 'extra_arg' argument to certain +# calls. This argument is used to establish that the BookManager +# is actually getting used when it should be. 
+class BookManager(models.Manager): + def create(self, *args, extra_arg=None, **kwargs): + return super().create(*args, **kwargs) + + def get_or_create(self, *args, extra_arg=None, **kwargs): + return super().get_or_create(*args, **kwargs) + + +class Book(models.Model): + title = models.CharField(max_length=100) + published = models.DateField() + authors = models.ManyToManyField(Person) + editor = models.ForeignKey( + Person, models.SET_NULL, null=True, related_name="edited" + ) + reviews = GenericRelation(Review) + pages = models.IntegerField(default=100) + + objects = BookManager() + + class Meta: + ordering = ("title",) + + def __str__(self): + return self.title + + +class Pet(models.Model): + name = models.CharField(max_length=100) + owner = models.ForeignKey(Person, models.CASCADE) + + class Meta: + ordering = ("name",) + + +class UserProfile(models.Model): + user = models.OneToOneField(User, models.SET_NULL, null=True) + flavor = models.CharField(max_length=100) + + class Meta: + ordering = ("flavor",) diff --git a/testbed/django__django/tests/multiple_database/routers.py b/testbed/django__django/tests/multiple_database/routers.py new file mode 100644 index 0000000000000000000000000000000000000000..0cc7f1729c909d24942192710348a54044c0c2a4 --- /dev/null +++ b/testbed/django__django/tests/multiple_database/routers.py @@ -0,0 +1,61 @@ +from django.db import DEFAULT_DB_ALIAS + + +class TestRouter: + """ + Vaguely behave like primary/replica, but the databases aren't assumed to + propagate changes. 
+ """ + + def db_for_read(self, model, instance=None, **hints): + if instance: + return instance._state.db or "other" + return "other" + + def db_for_write(self, model, **hints): + return DEFAULT_DB_ALIAS + + def allow_relation(self, obj1, obj2, **hints): + return obj1._state.db in ("default", "other") and obj2._state.db in ( + "default", + "other", + ) + + def allow_migrate(self, db, app_label, **hints): + return True + + +class AuthRouter: + """ + Control all database operations on models in the contrib.auth application. + """ + + def db_for_read(self, model, **hints): + "Point all read operations on auth models to 'default'" + if model._meta.app_label == "auth": + # We use default here to ensure we can tell the difference + # between a read request and a write request for Auth objects + return "default" + return None + + def db_for_write(self, model, **hints): + "Point all operations on auth models to 'other'" + if model._meta.app_label == "auth": + return "other" + return None + + def allow_relation(self, obj1, obj2, **hints): + "Allow any relation if a model in Auth is involved" + return obj1._meta.app_label == "auth" or obj2._meta.app_label == "auth" or None + + def allow_migrate(self, db, app_label, **hints): + "Make sure the auth app only appears on the 'other' db" + if app_label == "auth": + return db == "other" + return None + + +class WriteRouter: + # A router that only expresses an opinion on writes + def db_for_write(self, model, **hints): + return "writer" diff --git a/testbed/django__django/tests/mutually_referential/__init__.py b/testbed/django__django/tests/mutually_referential/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/mutually_referential/models.py b/testbed/django__django/tests/mutually_referential/models.py new file mode 100644 index 0000000000000000000000000000000000000000..d82bfb4518fcdcca11b11a51e24ca0c0393deedb --- 
/dev/null +++ b/testbed/django__django/tests/mutually_referential/models.py @@ -0,0 +1,23 @@ +""" +Mutually referential many-to-one relationships + +Strings can be used instead of model literals to set up "lazy" relations. +""" + +from django.db import models + + +class Parent(models.Model): + name = models.CharField(max_length=100) + + # Use a simple string for forward declarations. + bestchild = models.ForeignKey( + "Child", models.SET_NULL, null=True, related_name="favored_by" + ) + + +class Child(models.Model): + name = models.CharField(max_length=100) + + # You can also explicitly specify the related app. + parent = models.ForeignKey("mutually_referential.Parent", models.CASCADE) diff --git a/testbed/django__django/tests/mutually_referential/tests.py b/testbed/django__django/tests/mutually_referential/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..d54c03ec9a4c2fb95a59f9379a281a9444930c06 --- /dev/null +++ b/testbed/django__django/tests/mutually_referential/tests.py @@ -0,0 +1,21 @@ +from django.test import TestCase + +from .models import Parent + + +class MutuallyReferentialTests(TestCase): + def test_mutually_referential(self): + # Create a Parent + q = Parent(name="Elizabeth") + q.save() + + # Create some children + c = q.child_set.create(name="Charles") + q.child_set.create(name="Edward") + + # Set the best child + # No assertion require here; if basic assignment and + # deletion works, the test passes. 
+ q.bestchild = c + q.save() + q.delete() diff --git a/testbed/django__django/tests/nested_foreign_keys/__init__.py b/testbed/django__django/tests/nested_foreign_keys/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/nested_foreign_keys/tests.py b/testbed/django__django/tests/nested_foreign_keys/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..840d5f21fa1d7017f9565b965031c05c1c3ac36b --- /dev/null +++ b/testbed/django__django/tests/nested_foreign_keys/tests.py @@ -0,0 +1,308 @@ +from django.test import TestCase + +from .models import ( + Event, + Movie, + Package, + PackageNullFK, + Person, + Screening, + ScreeningNullFK, +) + + +# These are tests for #16715. The basic scheme is always the same: 3 models with +# 2 relations. The first relation may be null, while the second is non-nullable. +# In some cases, Django would pick the wrong join type for the second relation, +# resulting in missing objects in the queryset. +# +# Model A +# | (Relation A/B : nullable) +# Model B +# | (Relation B/C : non-nullable) +# Model C +# +# Because of the possibility of NULL rows resulting from the LEFT OUTER JOIN +# between Model A and Model B (i.e. instances of A without reference to B), +# the second join must also be LEFT OUTER JOIN, so that we do not ignore +# instances of A that do not reference B. +# +# Relation A/B can either be an explicit foreign key or an implicit reverse +# relation such as introduced by one-to-one relations (through multi-table +# inheritance). 
+class NestedForeignKeysTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.director = Person.objects.create(name="Terry Gilliam / Terry Jones") + cls.movie = Movie.objects.create( + title="Monty Python and the Holy Grail", director=cls.director + ) + + # This test failed in #16715 because in some cases INNER JOIN was selected + # for the second foreign key relation instead of LEFT OUTER JOIN. + def test_inheritance(self): + Event.objects.create() + Screening.objects.create(movie=self.movie) + + self.assertEqual(len(Event.objects.all()), 2) + self.assertEqual(len(Event.objects.select_related("screening")), 2) + # This failed. + self.assertEqual(len(Event.objects.select_related("screening__movie")), 2) + + self.assertEqual(len(Event.objects.values()), 2) + self.assertEqual(len(Event.objects.values("screening__pk")), 2) + self.assertEqual(len(Event.objects.values("screening__movie__pk")), 2) + self.assertEqual(len(Event.objects.values("screening__movie__title")), 2) + # This failed. + self.assertEqual( + len( + Event.objects.values("screening__movie__pk", "screening__movie__title") + ), + 2, + ) + + # Simple filter/exclude queries for good measure. + self.assertEqual(Event.objects.filter(screening__movie=self.movie).count(), 1) + self.assertEqual(Event.objects.exclude(screening__movie=self.movie).count(), 1) + + # These all work because the second foreign key in the chain has null=True. 
+ def test_inheritance_null_FK(self): + Event.objects.create() + ScreeningNullFK.objects.create(movie=None) + ScreeningNullFK.objects.create(movie=self.movie) + + self.assertEqual(len(Event.objects.all()), 3) + self.assertEqual(len(Event.objects.select_related("screeningnullfk")), 3) + self.assertEqual(len(Event.objects.select_related("screeningnullfk__movie")), 3) + + self.assertEqual(len(Event.objects.values()), 3) + self.assertEqual(len(Event.objects.values("screeningnullfk__pk")), 3) + self.assertEqual(len(Event.objects.values("screeningnullfk__movie__pk")), 3) + self.assertEqual(len(Event.objects.values("screeningnullfk__movie__title")), 3) + self.assertEqual( + len( + Event.objects.values( + "screeningnullfk__movie__pk", "screeningnullfk__movie__title" + ) + ), + 3, + ) + + self.assertEqual( + Event.objects.filter(screeningnullfk__movie=self.movie).count(), 1 + ) + self.assertEqual( + Event.objects.exclude(screeningnullfk__movie=self.movie).count(), 2 + ) + + def test_null_exclude(self): + screening = ScreeningNullFK.objects.create(movie=None) + ScreeningNullFK.objects.create(movie=self.movie) + self.assertEqual( + list(ScreeningNullFK.objects.exclude(movie__id=self.movie.pk)), [screening] + ) + + # This test failed in #16715 because in some cases INNER JOIN was selected + # for the second foreign key relation instead of LEFT OUTER JOIN. 
+ def test_explicit_ForeignKey(self): + Package.objects.create() + screening = Screening.objects.create(movie=self.movie) + Package.objects.create(screening=screening) + + self.assertEqual(len(Package.objects.all()), 2) + self.assertEqual(len(Package.objects.select_related("screening")), 2) + self.assertEqual(len(Package.objects.select_related("screening__movie")), 2) + + self.assertEqual(len(Package.objects.values()), 2) + self.assertEqual(len(Package.objects.values("screening__pk")), 2) + self.assertEqual(len(Package.objects.values("screening__movie__pk")), 2) + self.assertEqual(len(Package.objects.values("screening__movie__title")), 2) + # This failed. + self.assertEqual( + len( + Package.objects.values( + "screening__movie__pk", "screening__movie__title" + ) + ), + 2, + ) + + self.assertEqual(Package.objects.filter(screening__movie=self.movie).count(), 1) + self.assertEqual( + Package.objects.exclude(screening__movie=self.movie).count(), 1 + ) + + # These all work because the second foreign key in the chain has null=True. 
+ def test_explicit_ForeignKey_NullFK(self): + PackageNullFK.objects.create() + screening = ScreeningNullFK.objects.create(movie=None) + screening_with_movie = ScreeningNullFK.objects.create(movie=self.movie) + PackageNullFK.objects.create(screening=screening) + PackageNullFK.objects.create(screening=screening_with_movie) + + self.assertEqual(len(PackageNullFK.objects.all()), 3) + self.assertEqual(len(PackageNullFK.objects.select_related("screening")), 3) + self.assertEqual( + len(PackageNullFK.objects.select_related("screening__movie")), 3 + ) + + self.assertEqual(len(PackageNullFK.objects.values()), 3) + self.assertEqual(len(PackageNullFK.objects.values("screening__pk")), 3) + self.assertEqual(len(PackageNullFK.objects.values("screening__movie__pk")), 3) + self.assertEqual( + len(PackageNullFK.objects.values("screening__movie__title")), 3 + ) + self.assertEqual( + len( + PackageNullFK.objects.values( + "screening__movie__pk", "screening__movie__title" + ) + ), + 3, + ) + + self.assertEqual( + PackageNullFK.objects.filter(screening__movie=self.movie).count(), 1 + ) + self.assertEqual( + PackageNullFK.objects.exclude(screening__movie=self.movie).count(), 2 + ) + + +# Some additional tests for #16715. The only difference is the depth of the +# nesting as we now use 4 models instead of 3 (and thus 3 relations). This +# checks if promotion of join types works for deeper nesting too. 
+class DeeplyNestedForeignKeysTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.director = Person.objects.create(name="Terry Gilliam / Terry Jones") + cls.movie = Movie.objects.create( + title="Monty Python and the Holy Grail", director=cls.director + ) + + def test_inheritance(self): + Event.objects.create() + Screening.objects.create(movie=self.movie) + + self.assertEqual(len(Event.objects.all()), 2) + self.assertEqual( + len(Event.objects.select_related("screening__movie__director")), 2 + ) + + self.assertEqual(len(Event.objects.values()), 2) + self.assertEqual(len(Event.objects.values("screening__movie__director__pk")), 2) + self.assertEqual( + len(Event.objects.values("screening__movie__director__name")), 2 + ) + self.assertEqual( + len( + Event.objects.values( + "screening__movie__director__pk", "screening__movie__director__name" + ) + ), + 2, + ) + self.assertEqual( + len( + Event.objects.values( + "screening__movie__pk", "screening__movie__director__pk" + ) + ), + 2, + ) + self.assertEqual( + len( + Event.objects.values( + "screening__movie__pk", "screening__movie__director__name" + ) + ), + 2, + ) + self.assertEqual( + len( + Event.objects.values( + "screening__movie__title", "screening__movie__director__pk" + ) + ), + 2, + ) + self.assertEqual( + len( + Event.objects.values( + "screening__movie__title", "screening__movie__director__name" + ) + ), + 2, + ) + + self.assertEqual( + Event.objects.filter(screening__movie__director=self.director).count(), 1 + ) + self.assertEqual( + Event.objects.exclude(screening__movie__director=self.director).count(), 1 + ) + + def test_explicit_ForeignKey(self): + Package.objects.create() + screening = Screening.objects.create(movie=self.movie) + Package.objects.create(screening=screening) + + self.assertEqual(len(Package.objects.all()), 2) + self.assertEqual( + len(Package.objects.select_related("screening__movie__director")), 2 + ) + + self.assertEqual(len(Package.objects.values()), 2) + self.assertEqual( + 
len(Package.objects.values("screening__movie__director__pk")), 2 + ) + self.assertEqual( + len(Package.objects.values("screening__movie__director__name")), 2 + ) + self.assertEqual( + len( + Package.objects.values( + "screening__movie__director__pk", "screening__movie__director__name" + ) + ), + 2, + ) + self.assertEqual( + len( + Package.objects.values( + "screening__movie__pk", "screening__movie__director__pk" + ) + ), + 2, + ) + self.assertEqual( + len( + Package.objects.values( + "screening__movie__pk", "screening__movie__director__name" + ) + ), + 2, + ) + self.assertEqual( + len( + Package.objects.values( + "screening__movie__title", "screening__movie__director__pk" + ) + ), + 2, + ) + self.assertEqual( + len( + Package.objects.values( + "screening__movie__title", "screening__movie__director__name" + ) + ), + 2, + ) + + self.assertEqual( + Package.objects.filter(screening__movie__director=self.director).count(), 1 + ) + self.assertEqual( + Package.objects.exclude(screening__movie__director=self.director).count(), 1 + ) diff --git a/testbed/django__django/tests/no_models/__init__.py b/testbed/django__django/tests/no_models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/no_models/tests.py b/testbed/django__django/tests/no_models/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..d20a834e303f4b725196a05ff36fad53c3e5a7aa --- /dev/null +++ b/testbed/django__django/tests/no_models/tests.py @@ -0,0 +1,9 @@ +from django.apps import apps +from django.test import SimpleTestCase + + +class NoModelTests(SimpleTestCase): + def test_no_models(self): + """It's possible to load an app with no models.py file.""" + app_config = apps.get_app_config("no_models") + self.assertIsNone(app_config.models_module) diff --git a/testbed/django__django/tests/null_fk/__init__.py b/testbed/django__django/tests/null_fk/__init__.py new file mode 
100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/null_queries/__init__.py b/testbed/django__django/tests/null_queries/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/null_queries/tests.py b/testbed/django__django/tests/null_queries/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..828c68d9214d853986ee3e1bfa28dae93e9593ad --- /dev/null +++ b/testbed/django__django/tests/null_queries/tests.py @@ -0,0 +1,75 @@ +from django.core.exceptions import FieldError +from django.test import TestCase + +from .models import Choice, Inner, OuterA, OuterB, Poll + + +class NullQueriesTests(TestCase): + def test_none_as_null(self): + """ + Regression test for the use of None as a query value. + + None is interpreted as an SQL NULL, but only in __exact and __iexact + queries. + Set up some initial polls and choices + """ + p1 = Poll(question="Why?") + p1.save() + c1 = Choice(poll=p1, choice="Because.") + c1.save() + c2 = Choice(poll=p1, choice="Why Not?") + c2.save() + + # Exact query with value None returns nothing ("is NULL" in sql, + # but every 'id' field has a value). + self.assertSequenceEqual(Choice.objects.filter(choice__exact=None), []) + + # The same behavior for iexact query. + self.assertSequenceEqual(Choice.objects.filter(choice__iexact=None), []) + + # Excluding the previous result returns everything. + self.assertSequenceEqual( + Choice.objects.exclude(choice=None).order_by("id"), [c1, c2] + ) + + # Valid query, but fails because foo isn't a keyword + msg = ( + "Cannot resolve keyword 'foo' into field. 
Choices are: choice, id, poll, " + "poll_id" + ) + with self.assertRaisesMessage(FieldError, msg): + Choice.objects.filter(foo__exact=None) + + # Can't use None on anything other than __exact and __iexact + with self.assertRaisesMessage(ValueError, "Cannot use None as a query value"): + Choice.objects.filter(id__gt=None) + + def test_unsaved(self): + poll = Poll(question="How?") + msg = ( + "'Poll' instance needs to have a primary key value before this " + "relationship can be used." + ) + with self.assertRaisesMessage(ValueError, msg): + poll.choice_set.all() + + def test_reverse_relations(self): + """ + Querying across reverse relations and then another relation should + insert outer joins correctly so as not to exclude results. + """ + obj = OuterA.objects.create() + self.assertSequenceEqual(OuterA.objects.filter(inner__third=None), [obj]) + self.assertSequenceEqual(OuterA.objects.filter(inner__third__data=None), [obj]) + + inner = Inner.objects.create(first=obj) + self.assertSequenceEqual( + Inner.objects.filter(first__inner__third=None), [inner] + ) + + # Ticket #13815: check if _isnull=False does not produce + # faulty empty lists + outerb = OuterB.objects.create(data="reverse") + self.assertSequenceEqual(OuterB.objects.filter(inner__isnull=False), []) + Inner.objects.create(first=obj) + self.assertSequenceEqual(OuterB.objects.exclude(inner__isnull=False), [outerb]) diff --git a/testbed/django__django/tests/one_to_one/__init__.py b/testbed/django__django/tests/one_to_one/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/order_with_respect_to/tests.py b/testbed/django__django/tests/order_with_respect_to/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..36828394449d8cde904d1fab288bef61e22a8dea --- /dev/null +++ b/testbed/django__django/tests/order_with_respect_to/tests.py @@ -0,0 +1,49 @@ +from operator import attrgetter 
+ +from django.db import models +from django.test import SimpleTestCase, TestCase +from django.test.utils import isolate_apps + +from .base_tests import BaseOrderWithRespectToTests +from .models import Answer, Dimension, Entity, Post, Question + + +class OrderWithRespectToBaseTests(BaseOrderWithRespectToTests, TestCase): + Answer = Answer + Post = Post + Question = Question + + +class OrderWithRespectToTests(SimpleTestCase): + @isolate_apps("order_with_respect_to") + def test_duplicate_order_field(self): + class Bar(models.Model): + class Meta: + app_label = "order_with_respect_to" + + class Foo(models.Model): + bar = models.ForeignKey(Bar, models.CASCADE) + order = models.OrderWrt() + + class Meta: + order_with_respect_to = "bar" + app_label = "order_with_respect_to" + + count = 0 + for field in Foo._meta.local_fields: + if isinstance(field, models.OrderWrt): + count += 1 + + self.assertEqual(count, 1) + + +class TestOrderWithRespectToOneToOnePK(TestCase): + def test_set_order(self): + e = Entity.objects.create() + d = Dimension.objects.create(entity=e) + c1 = d.component_set.create() + c2 = d.component_set.create() + d.set_component_order([c1.id, c2.id]) + self.assertQuerySetEqual( + d.component_set.all(), [c1.id, c2.id], attrgetter("pk") + ) diff --git a/testbed/django__django/tests/queries/test_q.py b/testbed/django__django/tests/queries/test_q.py new file mode 100644 index 0000000000000000000000000000000000000000..cdf40292b06a85a7d5fc49487a32edb9df724ae8 --- /dev/null +++ b/testbed/django__django/tests/queries/test_q.py @@ -0,0 +1,263 @@ +from django.core.exceptions import FieldError +from django.db.models import ( + BooleanField, + Exists, + ExpressionWrapper, + F, + OuterRef, + Q, + Value, +) +from django.db.models.expressions import NegatedExpression, RawSQL +from django.db.models.functions import Lower +from django.db.models.sql.where import NothingNode +from django.test import SimpleTestCase, TestCase + +from .models import Tag + + +class 
QTests(SimpleTestCase): + def test_combine_and_empty(self): + q = Q(x=1) + self.assertEqual(q & Q(), q) + self.assertEqual(Q() & q, q) + + q = Q(x__in={}.keys()) + self.assertEqual(q & Q(), q) + self.assertEqual(Q() & q, q) + + def test_combine_and_both_empty(self): + self.assertEqual(Q() & Q(), Q()) + + def test_combine_or_empty(self): + q = Q(x=1) + self.assertEqual(q | Q(), q) + self.assertEqual(Q() | q, q) + + q = Q(x__in={}.keys()) + self.assertEqual(q | Q(), q) + self.assertEqual(Q() | q, q) + + def test_combine_xor_empty(self): + q = Q(x=1) + self.assertEqual(q ^ Q(), q) + self.assertEqual(Q() ^ q, q) + + q = Q(x__in={}.keys()) + self.assertEqual(q ^ Q(), q) + self.assertEqual(Q() ^ q, q) + + def test_combine_empty_copy(self): + base_q = Q(x=1) + tests = [ + base_q | Q(), + Q() | base_q, + base_q & Q(), + Q() & base_q, + base_q ^ Q(), + Q() ^ base_q, + ] + for i, q in enumerate(tests): + with self.subTest(i=i): + self.assertEqual(q, base_q) + self.assertIsNot(q, base_q) + + def test_combine_or_both_empty(self): + self.assertEqual(Q() | Q(), Q()) + + def test_combine_xor_both_empty(self): + self.assertEqual(Q() ^ Q(), Q()) + + def test_combine_not_q_object(self): + obj = object() + q = Q(x=1) + with self.assertRaisesMessage(TypeError, str(obj)): + q | obj + with self.assertRaisesMessage(TypeError, str(obj)): + q & obj + with self.assertRaisesMessage(TypeError, str(obj)): + q ^ obj + + def test_combine_negated_boolean_expression(self): + tagged = Tag.objects.filter(category=OuterRef("pk")) + tests = [ + Q() & ~Exists(tagged), + Q() | ~Exists(tagged), + Q() ^ ~Exists(tagged), + ] + for q in tests: + with self.subTest(q=q): + self.assertIsInstance(q, NegatedExpression) + + def test_deconstruct(self): + q = Q(price__gt=F("discounted_price")) + path, args, kwargs = q.deconstruct() + self.assertEqual(path, "django.db.models.Q") + self.assertEqual(args, (("price__gt", F("discounted_price")),)) + self.assertEqual(kwargs, {}) + + def test_deconstruct_negated(self): + 
q = ~Q(price__gt=F("discounted_price")) + path, args, kwargs = q.deconstruct() + self.assertEqual(args, (("price__gt", F("discounted_price")),)) + self.assertEqual(kwargs, {"_negated": True}) + + def test_deconstruct_or(self): + q1 = Q(price__gt=F("discounted_price")) + q2 = Q(price=F("discounted_price")) + q = q1 | q2 + path, args, kwargs = q.deconstruct() + self.assertEqual( + args, + ( + ("price__gt", F("discounted_price")), + ("price", F("discounted_price")), + ), + ) + self.assertEqual(kwargs, {"_connector": Q.OR}) + + def test_deconstruct_xor(self): + q1 = Q(price__gt=F("discounted_price")) + q2 = Q(price=F("discounted_price")) + q = q1 ^ q2 + path, args, kwargs = q.deconstruct() + self.assertEqual( + args, + ( + ("price__gt", F("discounted_price")), + ("price", F("discounted_price")), + ), + ) + self.assertEqual(kwargs, {"_connector": Q.XOR}) + + def test_deconstruct_and(self): + q1 = Q(price__gt=F("discounted_price")) + q2 = Q(price=F("discounted_price")) + q = q1 & q2 + path, args, kwargs = q.deconstruct() + self.assertEqual( + args, + ( + ("price__gt", F("discounted_price")), + ("price", F("discounted_price")), + ), + ) + self.assertEqual(kwargs, {}) + + def test_deconstruct_multiple_kwargs(self): + q = Q(price__gt=F("discounted_price"), price=F("discounted_price")) + path, args, kwargs = q.deconstruct() + self.assertEqual( + args, + ( + ("price", F("discounted_price")), + ("price__gt", F("discounted_price")), + ), + ) + self.assertEqual(kwargs, {}) + + def test_deconstruct_nested(self): + q = Q(Q(price__gt=F("discounted_price"))) + path, args, kwargs = q.deconstruct() + self.assertEqual(args, (Q(price__gt=F("discounted_price")),)) + self.assertEqual(kwargs, {}) + + def test_deconstruct_boolean_expression(self): + expr = RawSQL("1 = 1", BooleanField()) + q = Q(expr) + _, args, kwargs = q.deconstruct() + self.assertEqual(args, (expr,)) + self.assertEqual(kwargs, {}) + + def test_reconstruct(self): + q = Q(price__gt=F("discounted_price")) + path, args, 
kwargs = q.deconstruct() + self.assertEqual(Q(*args, **kwargs), q) + + def test_reconstruct_negated(self): + q = ~Q(price__gt=F("discounted_price")) + path, args, kwargs = q.deconstruct() + self.assertEqual(Q(*args, **kwargs), q) + + def test_reconstruct_or(self): + q1 = Q(price__gt=F("discounted_price")) + q2 = Q(price=F("discounted_price")) + q = q1 | q2 + path, args, kwargs = q.deconstruct() + self.assertEqual(Q(*args, **kwargs), q) + + def test_reconstruct_xor(self): + q1 = Q(price__gt=F("discounted_price")) + q2 = Q(price=F("discounted_price")) + q = q1 ^ q2 + path, args, kwargs = q.deconstruct() + self.assertEqual(Q(*args, **kwargs), q) + + def test_reconstruct_and(self): + q1 = Q(price__gt=F("discounted_price")) + q2 = Q(price=F("discounted_price")) + q = q1 & q2 + path, args, kwargs = q.deconstruct() + self.assertEqual(Q(*args, **kwargs), q) + + def test_flatten(self): + q = Q() + self.assertEqual(list(q.flatten()), [q]) + q = Q(NothingNode()) + self.assertEqual(list(q.flatten()), [q, q.children[0]]) + q = Q( + ExpressionWrapper( + Q(RawSQL("id = 0", params=(), output_field=BooleanField())) + | Q(price=Value("4.55")) + | Q(name=Lower("category")), + output_field=BooleanField(), + ) + ) + flatten = list(q.flatten()) + self.assertEqual(len(flatten), 7) + + def test_create_helper(self): + items = [("a", 1), ("b", 2), ("c", 3)] + for connector in [Q.AND, Q.OR, Q.XOR]: + with self.subTest(connector=connector): + self.assertEqual( + Q.create(items, connector=connector), + Q(*items, _connector=connector), + ) + + +class QCheckTests(TestCase): + def test_basic(self): + q = Q(price__gt=20) + self.assertIs(q.check({"price": 30}), True) + self.assertIs(q.check({"price": 10}), False) + + def test_expression(self): + q = Q(name="test") + self.assertIs(q.check({"name": Lower(Value("TeSt"))}), True) + self.assertIs(q.check({"name": Value("other")}), False) + + def test_missing_field(self): + q = Q(description__startswith="prefix") + msg = "Cannot resolve keyword 
'description' into field." + with self.assertRaisesMessage(FieldError, msg): + q.check({"name": "test"}) + + def test_boolean_expression(self): + q = Q(ExpressionWrapper(Q(price__gt=20), output_field=BooleanField())) + self.assertIs(q.check({"price": 25}), True) + self.assertIs(q.check({"price": Value(10)}), False) + + def test_rawsql(self): + """ + RawSQL expressions cause a database error because "price" cannot be + replaced by its value. In this case, Q.check() logs a warning and + return True. + """ + q = Q(RawSQL("price > %s", params=(20,), output_field=BooleanField())) + with self.assertLogs("django.db.models", "WARNING") as cm: + self.assertIs(q.check({"price": 10}), True) + self.assertIn( + f"Got a database error calling check() on {q!r}: ", + cm.records[0].getMessage(), + ) diff --git a/testbed/django__django/tests/raw_query/models.py b/testbed/django__django/tests/raw_query/models.py new file mode 100644 index 0000000000000000000000000000000000000000..a8ccc11147af8e7311f5bd6ef5e4135086ad2da3 --- /dev/null +++ b/testbed/django__django/tests/raw_query/models.py @@ -0,0 +1,47 @@ +from django.db import models + + +class Author(models.Model): + first_name = models.CharField(max_length=255) + last_name = models.CharField(max_length=255) + dob = models.DateField() + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # Protect against annotations being passed to __init__ -- + # this'll make the test suite get angry if annotations aren't + # treated differently than fields. 
+ for k in kwargs: + assert k in [f.attname for f in self._meta.fields], ( + "Author.__init__ got an unexpected parameter: %s" % k + ) + + +class Book(models.Model): + title = models.CharField(max_length=255) + author = models.ForeignKey(Author, models.CASCADE) + paperback = models.BooleanField(default=False) + opening_line = models.TextField() + + +class BookFkAsPk(models.Model): + book = models.ForeignKey( + Book, models.CASCADE, primary_key=True, db_column="not_the_default" + ) + + +class Coffee(models.Model): + brand = models.CharField(max_length=255, db_column="name") + price = models.DecimalField(max_digits=10, decimal_places=2, default=0) + + +class MixedCaseIDColumn(models.Model): + id = models.AutoField(primary_key=True, db_column="MiXeD_CaSe_Id") + + +class Reviewer(models.Model): + reviewed = models.ManyToManyField(Book) + + +class FriendlyAuthor(Author): + pass diff --git a/testbed/django__django/tests/requests_tests/__init__.py b/testbed/django__django/tests/requests_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/requests_tests/test_accept_header.py b/testbed/django__django/tests/requests_tests/test_accept_header.py new file mode 100644 index 0000000000000000000000000000000000000000..c6eed0e4793e236b50f5a785361220316dbc7f3e --- /dev/null +++ b/testbed/django__django/tests/requests_tests/test_accept_header.py @@ -0,0 +1,103 @@ +from unittest import TestCase + +from django.http import HttpRequest +from django.http.request import MediaType + + +class MediaTypeTests(TestCase): + def test_empty(self): + for empty_media_type in (None, ""): + with self.subTest(media_type=empty_media_type): + media_type = MediaType(empty_media_type) + self.assertIs(media_type.is_all_types, False) + self.assertEqual(str(media_type), "") + self.assertEqual(repr(media_type), "") + + def test_str(self): + self.assertEqual(str(MediaType("*/*; q=0.8")), "*/*; 
q=0.8") + self.assertEqual(str(MediaType("application/xml")), "application/xml") + + def test_repr(self): + self.assertEqual(repr(MediaType("*/*; q=0.8")), "") + self.assertEqual( + repr(MediaType("application/xml")), + "", + ) + + def test_is_all_types(self): + self.assertIs(MediaType("*/*").is_all_types, True) + self.assertIs(MediaType("*/*; q=0.8").is_all_types, True) + self.assertIs(MediaType("text/*").is_all_types, False) + self.assertIs(MediaType("application/xml").is_all_types, False) + + def test_match(self): + tests = [ + ("*/*; q=0.8", "*/*"), + ("*/*", "application/json"), + (" */* ", "application/json"), + ("application/*", "application/json"), + ("application/xml", "application/xml"), + (" application/xml ", "application/xml"), + ("application/xml", " application/xml "), + ] + for accepted_type, mime_type in tests: + with self.subTest(accepted_type, mime_type=mime_type): + self.assertIs(MediaType(accepted_type).match(mime_type), True) + + def test_no_match(self): + tests = [ + (None, "*/*"), + ("", "*/*"), + ("; q=0.8", "*/*"), + ("application/xml", "application/html"), + ("application/xml", "*/*"), + ] + for accepted_type, mime_type in tests: + with self.subTest(accepted_type, mime_type=mime_type): + self.assertIs(MediaType(accepted_type).match(mime_type), False) + + +class AcceptHeaderTests(TestCase): + def test_no_headers(self): + """Absence of Accept header defaults to '*/*'.""" + request = HttpRequest() + self.assertEqual( + [str(accepted_type) for accepted_type in request.accepted_types], + ["*/*"], + ) + + def test_accept_headers(self): + request = HttpRequest() + request.META[ + "HTTP_ACCEPT" + ] = "text/html, application/xhtml+xml,application/xml ;q=0.9,*/*;q=0.8" + self.assertEqual( + [str(accepted_type) for accepted_type in request.accepted_types], + [ + "text/html", + "application/xhtml+xml", + "application/xml; q=0.9", + "*/*; q=0.8", + ], + ) + + def test_request_accepts_any(self): + request = HttpRequest() + request.META["HTTP_ACCEPT"] = 
"*/*" + self.assertIs(request.accepts("application/json"), True) + + def test_request_accepts_none(self): + request = HttpRequest() + request.META["HTTP_ACCEPT"] = "" + self.assertIs(request.accepts("application/json"), False) + self.assertEqual(request.accepted_types, []) + + def test_request_accepts_some(self): + request = HttpRequest() + request.META[ + "HTTP_ACCEPT" + ] = "text/html,application/xhtml+xml,application/xml;q=0.9" + self.assertIs(request.accepts("text/html"), True) + self.assertIs(request.accepts("application/xhtml+xml"), True) + self.assertIs(request.accepts("application/xml"), True) + self.assertIs(request.accepts("application/json"), False) diff --git a/testbed/django__django/tests/requests_tests/test_data_upload_settings.py b/testbed/django__django/tests/requests_tests/test_data_upload_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..e89af0a39b827f87b276f843e1fb7fe54dde981b --- /dev/null +++ b/testbed/django__django/tests/requests_tests/test_data_upload_settings.py @@ -0,0 +1,270 @@ +from io import BytesIO + +from django.core.exceptions import ( + RequestDataTooBig, + TooManyFieldsSent, + TooManyFilesSent, +) +from django.core.handlers.wsgi import WSGIRequest +from django.test import SimpleTestCase +from django.test.client import FakePayload + +TOO_MANY_FIELDS_MSG = ( + "The number of GET/POST parameters exceeded settings.DATA_UPLOAD_MAX_NUMBER_FIELDS." +) +TOO_MANY_FILES_MSG = ( + "The number of files exceeded settings.DATA_UPLOAD_MAX_NUMBER_FILES." +) +TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE." 
+ + +class DataUploadMaxMemorySizeFormPostTests(SimpleTestCase): + def setUp(self): + payload = FakePayload("a=1&a=2&a=3\r\n") + self.request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + + def test_size_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=12): + with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG): + self.request._load_post_and_files() + + def test_size_not_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=13): + self.request._load_post_and_files() + + def test_no_limit(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None): + self.request._load_post_and_files() + + +class DataUploadMaxMemorySizeMultipartPostTests(SimpleTestCase): + def setUp(self): + payload = FakePayload( + "\r\n".join( + [ + "--boundary", + 'Content-Disposition: form-data; name="name"', + "", + "value", + "--boundary--", + ] + ) + ) + self.request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=boundary", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + + def test_size_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10): + with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG): + self.request._load_post_and_files() + + def test_size_not_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=11): + self.request._load_post_and_files() + + def test_no_limit(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None): + self.request._load_post_and_files() + + def test_file_passes(self): + payload = FakePayload( + "\r\n".join( + [ + "--boundary", + 'Content-Disposition: form-data; name="file1"; ' + 'filename="test.file"', + "", + "value", + "--boundary--", + ] + ) + ) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=boundary", + "CONTENT_LENGTH": 
len(payload), + "wsgi.input": payload, + } + ) + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=1): + request._load_post_and_files() + self.assertIn("file1", request.FILES, "Upload file not present") + + +class DataUploadMaxMemorySizeGetTests(SimpleTestCase): + def setUp(self): + self.request = WSGIRequest( + { + "REQUEST_METHOD": "GET", + "wsgi.input": BytesIO(b""), + "CONTENT_LENGTH": 3, + } + ) + + def test_data_upload_max_memory_size_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=2): + with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG): + self.request.body + + def test_size_not_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=3): + self.request.body + + def test_no_limit(self): + with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None): + self.request.body + + def test_empty_content_length(self): + self.request.environ["CONTENT_LENGTH"] = "" + self.request.body + + +class DataUploadMaxNumberOfFieldsGet(SimpleTestCase): + def test_get_max_fields_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=1): + with self.assertRaisesMessage(TooManyFieldsSent, TOO_MANY_FIELDS_MSG): + request = WSGIRequest( + { + "REQUEST_METHOD": "GET", + "wsgi.input": BytesIO(b""), + "QUERY_STRING": "a=1&a=2&a=3", + } + ) + request.GET["a"] + + def test_get_max_fields_not_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=3): + request = WSGIRequest( + { + "REQUEST_METHOD": "GET", + "wsgi.input": BytesIO(b""), + "QUERY_STRING": "a=1&a=2&a=3", + } + ) + request.GET["a"] + + +class DataUploadMaxNumberOfFieldsMultipartPost(SimpleTestCase): + def setUp(self): + payload = FakePayload( + "\r\n".join( + [ + "--boundary", + 'Content-Disposition: form-data; name="name1"', + "", + "value1", + "--boundary", + 'Content-Disposition: form-data; name="name2"', + "", + "value2", + "--boundary--", + ] + ) + ) + self.request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; 
boundary=boundary", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + + def test_number_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=1): + with self.assertRaisesMessage(TooManyFieldsSent, TOO_MANY_FIELDS_MSG): + self.request._load_post_and_files() + + def test_number_not_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=2): + self.request._load_post_and_files() + + def test_no_limit(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=None): + self.request._load_post_and_files() + + +class DataUploadMaxNumberOfFilesMultipartPost(SimpleTestCase): + def setUp(self): + payload = FakePayload( + "\r\n".join( + [ + "--boundary", + ( + 'Content-Disposition: form-data; name="name1"; ' + 'filename="name1.txt"' + ), + "", + "value1", + "--boundary", + ( + 'Content-Disposition: form-data; name="name2"; ' + 'filename="name2.txt"' + ), + "", + "value2", + "--boundary--", + ] + ) + ) + self.request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=boundary", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + + def test_number_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FILES=1): + with self.assertRaisesMessage(TooManyFilesSent, TOO_MANY_FILES_MSG): + self.request._load_post_and_files() + + def test_number_not_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FILES=2): + self.request._load_post_and_files() + + def test_no_limit(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FILES=None): + self.request._load_post_and_files() + + +class DataUploadMaxNumberOfFieldsFormPost(SimpleTestCase): + def setUp(self): + payload = FakePayload("\r\n".join(["a=1&a=2&a=3", ""])) + self.request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + + def test_number_exceeded(self): + with 
self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=2): + with self.assertRaisesMessage(TooManyFieldsSent, TOO_MANY_FIELDS_MSG): + self.request._load_post_and_files() + + def test_number_not_exceeded(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=3): + self.request._load_post_and_files() + + def test_no_limit(self): + with self.settings(DATA_UPLOAD_MAX_NUMBER_FIELDS=None): + self.request._load_post_and_files() diff --git a/testbed/django__django/tests/requests_tests/tests.py b/testbed/django__django/tests/requests_tests/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..79f82741dbae42db4af35e97bd0ef65adc02e590 --- /dev/null +++ b/testbed/django__django/tests/requests_tests/tests.py @@ -0,0 +1,1151 @@ +import copy +from io import BytesIO +from itertools import chain +from urllib.parse import urlencode + +from django.core.exceptions import DisallowedHost +from django.core.handlers.wsgi import LimitedStream, WSGIRequest +from django.http import ( + HttpHeaders, + HttpRequest, + RawPostDataException, + UnreadablePostError, +) +from django.http.multipartparser import MultiPartParserError +from django.http.request import split_domain_port +from django.test import RequestFactory, SimpleTestCase, override_settings +from django.test.client import BOUNDARY, MULTIPART_CONTENT, FakePayload + + +class RequestsTests(SimpleTestCase): + def test_httprequest(self): + request = HttpRequest() + self.assertEqual(list(request.GET), []) + self.assertEqual(list(request.POST), []) + self.assertEqual(list(request.COOKIES), []) + self.assertEqual(list(request.META), []) + + # .GET and .POST should be QueryDicts + self.assertEqual(request.GET.urlencode(), "") + self.assertEqual(request.POST.urlencode(), "") + + # and FILES should be MultiValueDict + self.assertEqual(request.FILES.getlist("foo"), []) + + self.assertIsNone(request.content_type) + self.assertIsNone(request.content_params) + + def test_httprequest_full_path(self): + request = HttpRequest() + 
request.path = "/;some/?awful/=path/foo:bar/" + request.path_info = "/prefix" + request.path + request.META["QUERY_STRING"] = ";some=query&+query=string" + expected = "/%3Bsome/%3Fawful/%3Dpath/foo:bar/?;some=query&+query=string" + self.assertEqual(request.get_full_path(), expected) + self.assertEqual(request.get_full_path_info(), "/prefix" + expected) + + def test_httprequest_full_path_with_query_string_and_fragment(self): + request = HttpRequest() + request.path = "/foo#bar" + request.path_info = "/prefix" + request.path + request.META["QUERY_STRING"] = "baz#quux" + self.assertEqual(request.get_full_path(), "/foo%23bar?baz#quux") + self.assertEqual(request.get_full_path_info(), "/prefix/foo%23bar?baz#quux") + + def test_httprequest_repr(self): + request = HttpRequest() + request.path = "/somepath/" + request.method = "GET" + request.GET = {"get-key": "get-value"} + request.POST = {"post-key": "post-value"} + request.COOKIES = {"post-key": "post-value"} + request.META = {"post-key": "post-value"} + self.assertEqual(repr(request), "") + + def test_httprequest_repr_invalid_method_and_path(self): + request = HttpRequest() + self.assertEqual(repr(request), "") + request = HttpRequest() + request.method = "GET" + self.assertEqual(repr(request), "") + request = HttpRequest() + request.path = "" + self.assertEqual(repr(request), "") + + def test_wsgirequest(self): + request = WSGIRequest( + { + "PATH_INFO": "bogus", + "REQUEST_METHOD": "bogus", + "CONTENT_TYPE": "text/html; charset=utf8", + "wsgi.input": BytesIO(b""), + } + ) + self.assertEqual(list(request.GET), []) + self.assertEqual(list(request.POST), []) + self.assertEqual(list(request.COOKIES), []) + self.assertEqual( + set(request.META), + { + "PATH_INFO", + "REQUEST_METHOD", + "SCRIPT_NAME", + "CONTENT_TYPE", + "wsgi.input", + }, + ) + self.assertEqual(request.META["PATH_INFO"], "bogus") + self.assertEqual(request.META["REQUEST_METHOD"], "bogus") + self.assertEqual(request.META["SCRIPT_NAME"], "") + 
self.assertEqual(request.content_type, "text/html") + self.assertEqual(request.content_params, {"charset": "utf8"}) + + def test_wsgirequest_with_script_name(self): + """ + The request's path is correctly assembled, regardless of whether or + not the SCRIPT_NAME has a trailing slash (#20169). + """ + # With trailing slash + request = WSGIRequest( + { + "PATH_INFO": "/somepath/", + "SCRIPT_NAME": "/PREFIX/", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + self.assertEqual(request.path, "/PREFIX/somepath/") + # Without trailing slash + request = WSGIRequest( + { + "PATH_INFO": "/somepath/", + "SCRIPT_NAME": "/PREFIX", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + self.assertEqual(request.path, "/PREFIX/somepath/") + + def test_wsgirequest_script_url_double_slashes(self): + """ + WSGI squashes multiple successive slashes in PATH_INFO, WSGIRequest + should take that into account when populating request.path and + request.META['SCRIPT_NAME'] (#17133). + """ + request = WSGIRequest( + { + "SCRIPT_URL": "/mst/milestones//accounts/login//help", + "PATH_INFO": "/milestones/accounts/login/help", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + self.assertEqual(request.path, "/mst/milestones/accounts/login/help") + self.assertEqual(request.META["SCRIPT_NAME"], "/mst") + + def test_wsgirequest_with_force_script_name(self): + """ + The FORCE_SCRIPT_NAME setting takes precedence over the request's + SCRIPT_NAME environment parameter (#20169). 
+ """ + with override_settings(FORCE_SCRIPT_NAME="/FORCED_PREFIX/"): + request = WSGIRequest( + { + "PATH_INFO": "/somepath/", + "SCRIPT_NAME": "/PREFIX/", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + self.assertEqual(request.path, "/FORCED_PREFIX/somepath/") + + def test_wsgirequest_path_with_force_script_name_trailing_slash(self): + """ + The request's path is correctly assembled, regardless of whether or not + the FORCE_SCRIPT_NAME setting has a trailing slash (#20169). + """ + # With trailing slash + with override_settings(FORCE_SCRIPT_NAME="/FORCED_PREFIX/"): + request = WSGIRequest( + { + "PATH_INFO": "/somepath/", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + self.assertEqual(request.path, "/FORCED_PREFIX/somepath/") + # Without trailing slash + with override_settings(FORCE_SCRIPT_NAME="/FORCED_PREFIX"): + request = WSGIRequest( + { + "PATH_INFO": "/somepath/", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + self.assertEqual(request.path, "/FORCED_PREFIX/somepath/") + + def test_wsgirequest_repr(self): + request = WSGIRequest({"REQUEST_METHOD": "get", "wsgi.input": BytesIO(b"")}) + self.assertEqual(repr(request), "") + request = WSGIRequest( + { + "PATH_INFO": "/somepath/", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + request.GET = {"get-key": "get-value"} + request.POST = {"post-key": "post-value"} + request.COOKIES = {"post-key": "post-value"} + request.META = {"post-key": "post-value"} + self.assertEqual(repr(request), "") + + def test_wsgirequest_path_info(self): + def wsgi_str(path_info, encoding="utf-8"): + path_info = path_info.encode( + encoding + ) # Actual URL sent by the browser (bytestring) + path_info = path_info.decode( + "iso-8859-1" + ) # Value in the WSGI environ dict (native string) + return path_info + + # Regression for #19468 + request = WSGIRequest( + { + "PATH_INFO": wsgi_str("/سلام/"), + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + 
self.assertEqual(request.path, "/سلام/") + + # The URL may be incorrectly encoded in a non-UTF-8 encoding (#26971) + request = WSGIRequest( + { + "PATH_INFO": wsgi_str("/café/", encoding="iso-8859-1"), + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + } + ) + # Since it's impossible to decide the (wrong) encoding of the URL, it's + # left percent-encoded in the path. + self.assertEqual(request.path, "/caf%E9/") + + def test_wsgirequest_copy(self): + request = WSGIRequest({"REQUEST_METHOD": "get", "wsgi.input": BytesIO(b"")}) + request_copy = copy.copy(request) + self.assertIs(request_copy.environ, request.environ) + + def test_limited_stream(self): + # Read all of a limited stream + stream = LimitedStream(BytesIO(b"test"), 2) + self.assertEqual(stream.read(), b"te") + # Reading again returns nothing. + self.assertEqual(stream.read(), b"") + + # Read a number of characters greater than the stream has to offer + stream = LimitedStream(BytesIO(b"test"), 2) + self.assertEqual(stream.read(5), b"te") + # Reading again returns nothing. + self.assertEqual(stream.readline(5), b"") + + # Read sequentially from a stream + stream = LimitedStream(BytesIO(b"12345678"), 8) + self.assertEqual(stream.read(5), b"12345") + self.assertEqual(stream.read(5), b"678") + # Reading again returns nothing. + self.assertEqual(stream.readline(5), b"") + + # Read lines from a stream + stream = LimitedStream(BytesIO(b"1234\n5678\nabcd\nefgh\nijkl"), 24) + # Read a full line, unconditionally + self.assertEqual(stream.readline(), b"1234\n") + # Read a number of characters less than a line + self.assertEqual(stream.readline(2), b"56") + # Read the rest of the partial line + self.assertEqual(stream.readline(), b"78\n") + # Read a full line, with a character limit greater than the line length + self.assertEqual(stream.readline(6), b"abcd\n") + # Read the next line, deliberately terminated at the line end + self.assertEqual(stream.readline(4), b"efgh") + # Read the next line... 
just the line end + self.assertEqual(stream.readline(), b"\n") + # Read everything else. + self.assertEqual(stream.readline(), b"ijkl") + + # Regression for #15018 + # If a stream contains a newline, but the provided length + # is less than the number of provided characters, the newline + # doesn't reset the available character count + stream = LimitedStream(BytesIO(b"1234\nabcdef"), 9) + self.assertEqual(stream.readline(10), b"1234\n") + self.assertEqual(stream.readline(3), b"abc") + # Now expire the available characters + self.assertEqual(stream.readline(3), b"d") + # Reading again returns nothing. + self.assertEqual(stream.readline(2), b"") + + # Same test, but with read, not readline. + stream = LimitedStream(BytesIO(b"1234\nabcdef"), 9) + self.assertEqual(stream.read(6), b"1234\na") + self.assertEqual(stream.read(2), b"bc") + self.assertEqual(stream.read(2), b"d") + self.assertEqual(stream.read(2), b"") + self.assertEqual(stream.read(), b"") + + def test_stream_read(self): + payload = FakePayload("name=value") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + }, + ) + self.assertEqual(request.read(), b"name=value") + + def test_stream_readline(self): + payload = FakePayload("name=value\nother=string") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + }, + ) + self.assertEqual(request.readline(), b"name=value\n") + self.assertEqual(request.readline(), b"other=string") + + def test_read_after_value(self): + """ + Reading from request is allowed after accessing request contents as + POST or body. 
+ """ + payload = FakePayload("name=value") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + self.assertEqual(request.POST, {"name": ["value"]}) + self.assertEqual(request.body, b"name=value") + self.assertEqual(request.read(), b"name=value") + + def test_value_after_read(self): + """ + Construction of POST or body is not allowed after reading + from request. + """ + payload = FakePayload("name=value") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + self.assertEqual(request.read(2), b"na") + with self.assertRaises(RawPostDataException): + request.body + self.assertEqual(request.POST, {}) + + def test_non_ascii_POST(self): + payload = FakePayload(urlencode({"key": "España"})) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_LENGTH": len(payload), + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "wsgi.input": payload, + } + ) + self.assertEqual(request.POST, {"key": ["España"]}) + + def test_alternate_charset_POST(self): + """ + Test a POST with non-utf-8 payload encoding. + """ + payload = FakePayload(urlencode({"key": "España".encode("latin-1")})) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_LENGTH": len(payload), + "CONTENT_TYPE": "application/x-www-form-urlencoded; charset=iso-8859-1", + "wsgi.input": payload, + } + ) + self.assertEqual(request.POST, {"key": ["España"]}) + + def test_body_after_POST_multipart_form_data(self): + """ + Reading body after parsing multipart/form-data is not allowed + """ + # Because multipart is used for large amounts of data i.e. file uploads, + # we don't want the data held in memory twice, and we don't want to + # silence the error by setting body = '' either. 
+ payload = FakePayload( + "\r\n".join( + [ + "--boundary", + 'Content-Disposition: form-data; name="name"', + "", + "value", + "--boundary--", + ] + ) + ) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=boundary", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + self.assertEqual(request.POST, {"name": ["value"]}) + with self.assertRaises(RawPostDataException): + request.body + + def test_body_after_POST_multipart_related(self): + """ + Reading body after parsing multipart that isn't form-data is allowed + """ + # Ticket #9054 + # There are cases in which the multipart data is related instead of + # being a binary upload, in which case it should still be accessible + # via body. + payload_data = b"\r\n".join( + [ + b"--boundary", + b'Content-ID: id; name="name"', + b"", + b"value", + b"--boundary--", + ] + ) + payload = FakePayload(payload_data) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/related; boundary=boundary", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + self.assertEqual(request.POST, {}) + self.assertEqual(request.body, payload_data) + + def test_POST_multipart_with_content_length_zero(self): + """ + Multipart POST requests with Content-Length >= 0 are valid and need to + be handled. + """ + # According to RFC 9110 Section 8.6 every POST with Content-Length >= 0 + # is a valid request, so ensure that we handle Content-Length == 0. 
+ payload = FakePayload( + "\r\n".join( + [ + "--boundary", + 'Content-Disposition: form-data; name="name"', + "", + "value", + "--boundary--", + ] + ) + ) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=boundary", + "CONTENT_LENGTH": 0, + "wsgi.input": payload, + } + ) + self.assertEqual(request.POST, {}) + + def test_POST_binary_only(self): + payload = b"\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@" + environ = { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/octet-stream", + "CONTENT_LENGTH": len(payload), + "wsgi.input": BytesIO(payload), + } + request = WSGIRequest(environ) + self.assertEqual(request.POST, {}) + self.assertEqual(request.FILES, {}) + self.assertEqual(request.body, payload) + + # Same test without specifying content-type + environ.update({"CONTENT_TYPE": "", "wsgi.input": BytesIO(payload)}) + request = WSGIRequest(environ) + self.assertEqual(request.POST, {}) + self.assertEqual(request.FILES, {}) + self.assertEqual(request.body, payload) + + def test_read_by_lines(self): + payload = FakePayload("name=value") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + self.assertEqual(list(request), [b"name=value"]) + + def test_POST_after_body_read(self): + """ + POST should be populated even if body is read first + """ + payload = FakePayload("name=value") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + request.body # evaluate + self.assertEqual(request.POST, {"name": ["value"]}) + + def test_POST_after_body_read_and_stream_read(self): + """ + POST should be populated even if body is read first, and then + the stream is read second. 
+ """ + payload = FakePayload("name=value") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + request.body # evaluate + self.assertEqual(request.read(1), b"n") + self.assertEqual(request.POST, {"name": ["value"]}) + + def test_multipart_post_field_with_base64(self): + payload = FakePayload( + "\r\n".join( + [ + f"--{BOUNDARY}", + 'Content-Disposition: form-data; name="name"', + "Content-Transfer-Encoding: base64", + "", + "dmFsdWU=", + f"--{BOUNDARY}--", + "", + ] + ) + ) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": MULTIPART_CONTENT, + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + request.body # evaluate + self.assertEqual(request.POST, {"name": ["value"]}) + + def test_multipart_post_field_with_invalid_base64(self): + payload = FakePayload( + "\r\n".join( + [ + f"--{BOUNDARY}", + 'Content-Disposition: form-data; name="name"', + "Content-Transfer-Encoding: base64", + "", + "123", + f"--{BOUNDARY}--", + "", + ] + ) + ) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": MULTIPART_CONTENT, + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + request.body # evaluate + self.assertEqual(request.POST, {"name": ["123"]}) + + def test_POST_after_body_read_and_stream_read_multipart(self): + """ + POST should be populated even if body is read first, and then + the stream is read second. Using multipart/form-data instead of urlencoded. 
+ """ + payload = FakePayload( + "\r\n".join( + [ + "--boundary", + 'Content-Disposition: form-data; name="name"', + "", + "value", + "--boundary--" "", + ] + ) + ) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=boundary", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + request.body # evaluate + # Consume enough data to mess up the parsing: + self.assertEqual(request.read(13), b"--boundary\r\nC") + self.assertEqual(request.POST, {"name": ["value"]}) + + def test_POST_immutable_for_multipart(self): + """ + MultiPartParser.parse() leaves request.POST immutable. + """ + payload = FakePayload( + "\r\n".join( + [ + "--boundary", + 'Content-Disposition: form-data; name="name"', + "", + "value", + "--boundary--", + ] + ) + ) + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=boundary", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + self.assertFalse(request.POST._mutable) + + def test_multipart_without_boundary(self): + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data;", + "CONTENT_LENGTH": 0, + "wsgi.input": FakePayload(), + } + ) + with self.assertRaisesMessage( + MultiPartParserError, "Invalid boundary in multipart: None" + ): + request.POST + + def test_multipart_non_ascii_content_type(self): + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary = \xe0", + "CONTENT_LENGTH": 0, + "wsgi.input": FakePayload(), + } + ) + msg = ( + "Invalid non-ASCII Content-Type in multipart: multipart/form-data; " + "boundary = à" + ) + with self.assertRaisesMessage(MultiPartParserError, msg): + request.POST + + def test_POST_connection_error(self): + """ + If wsgi.input.read() raises an exception while trying to read() the + POST, the exception is identifiable (not a generic OSError). 
+ """ + + class ExplodingBytesIO(BytesIO): + def read(self, size=-1, /): + raise OSError("kaboom!") + + payload = b"name=value" + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": ExplodingBytesIO(payload), + } + ) + with self.assertRaises(UnreadablePostError): + request.body + + def test_set_encoding_clears_POST(self): + payload = FakePayload("name=Hello Günter") + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "application/x-www-form-urlencoded", + "CONTENT_LENGTH": len(payload), + "wsgi.input": payload, + } + ) + self.assertEqual(request.POST, {"name": ["Hello Günter"]}) + request.encoding = "iso-8859-16" + self.assertEqual(request.POST, {"name": ["Hello GĂŒnter"]}) + + def test_set_encoding_clears_GET(self): + payload = FakePayload("") + request = WSGIRequest( + { + "REQUEST_METHOD": "GET", + "wsgi.input": payload, + "QUERY_STRING": "name=Hello%20G%C3%BCnter", + } + ) + self.assertEqual(request.GET, {"name": ["Hello Günter"]}) + request.encoding = "iso-8859-16" + self.assertEqual(request.GET, {"name": ["Hello G\u0102\u0152nter"]}) + + def test_FILES_connection_error(self): + """ + If wsgi.input.read() raises an exception while trying to read() the + FILES, the exception is identifiable (not a generic OSError). 
+ """ + + class ExplodingBytesIO(BytesIO): + def read(self, size=-1, /): + raise OSError("kaboom!") + + payload = b"x" + request = WSGIRequest( + { + "REQUEST_METHOD": "POST", + "CONTENT_TYPE": "multipart/form-data; boundary=foo_", + "CONTENT_LENGTH": len(payload), + "wsgi.input": ExplodingBytesIO(payload), + } + ) + with self.assertRaises(UnreadablePostError): + request.FILES + + def test_copy(self): + request = HttpRequest() + request_copy = copy.copy(request) + self.assertIs(request_copy.resolver_match, request.resolver_match) + + def test_deepcopy(self): + request = RequestFactory().get("/") + request.session = {} + request_copy = copy.deepcopy(request) + request.session["key"] = "value" + self.assertEqual(request_copy.session, {}) + + +class HostValidationTests(SimpleTestCase): + poisoned_hosts = [ + "example.com@evil.tld", + "example.com:dr.frankenstein@evil.tld", + "example.com:dr.frankenstein@evil.tld:80", + "example.com:80/badpath", + "example.com: recovermypassword.com", + ] + + @override_settings( + USE_X_FORWARDED_HOST=False, + ALLOWED_HOSTS=[ + "forward.com", + "example.com", + "internal.com", + "12.34.56.78", + "[2001:19f0:feee::dead:beef:cafe]", + "xn--4ca9at.com", + ".multitenant.com", + "INSENSITIVE.com", + "[::ffff:169.254.169.254]", + ], + ) + def test_http_get_host(self): + # Check if X_FORWARDED_HOST is provided. + request = HttpRequest() + request.META = { + "HTTP_X_FORWARDED_HOST": "forward.com", + "HTTP_HOST": "example.com", + "SERVER_NAME": "internal.com", + "SERVER_PORT": 80, + } + # X_FORWARDED_HOST is ignored. + self.assertEqual(request.get_host(), "example.com") + + # Check if X_FORWARDED_HOST isn't provided. + request = HttpRequest() + request.META = { + "HTTP_HOST": "example.com", + "SERVER_NAME": "internal.com", + "SERVER_PORT": 80, + } + self.assertEqual(request.get_host(), "example.com") + + # Check if HTTP_HOST isn't provided. 
+ request = HttpRequest() + request.META = { + "SERVER_NAME": "internal.com", + "SERVER_PORT": 80, + } + self.assertEqual(request.get_host(), "internal.com") + + # Check if HTTP_HOST isn't provided, and we're on a nonstandard port + request = HttpRequest() + request.META = { + "SERVER_NAME": "internal.com", + "SERVER_PORT": 8042, + } + self.assertEqual(request.get_host(), "internal.com:8042") + + legit_hosts = [ + "example.com", + "example.com:80", + "12.34.56.78", + "12.34.56.78:443", + "[2001:19f0:feee::dead:beef:cafe]", + "[2001:19f0:feee::dead:beef:cafe]:8080", + "xn--4ca9at.com", # Punycode for öäü.com + "anything.multitenant.com", + "multitenant.com", + "insensitive.com", + "example.com.", + "example.com.:80", + "[::ffff:169.254.169.254]", + ] + + for host in legit_hosts: + request = HttpRequest() + request.META = { + "HTTP_HOST": host, + } + request.get_host() + + # Poisoned host headers are rejected as suspicious + for host in chain(self.poisoned_hosts, ["other.com", "example.com.."]): + with self.assertRaises(DisallowedHost): + request = HttpRequest() + request.META = { + "HTTP_HOST": host, + } + request.get_host() + + @override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=["*"]) + def test_http_get_host_with_x_forwarded_host(self): + # Check if X_FORWARDED_HOST is provided. + request = HttpRequest() + request.META = { + "HTTP_X_FORWARDED_HOST": "forward.com", + "HTTP_HOST": "example.com", + "SERVER_NAME": "internal.com", + "SERVER_PORT": 80, + } + # X_FORWARDED_HOST is obeyed. + self.assertEqual(request.get_host(), "forward.com") + + # Check if X_FORWARDED_HOST isn't provided. + request = HttpRequest() + request.META = { + "HTTP_HOST": "example.com", + "SERVER_NAME": "internal.com", + "SERVER_PORT": 80, + } + self.assertEqual(request.get_host(), "example.com") + + # Check if HTTP_HOST isn't provided. 
+ request = HttpRequest() + request.META = { + "SERVER_NAME": "internal.com", + "SERVER_PORT": 80, + } + self.assertEqual(request.get_host(), "internal.com") + + # Check if HTTP_HOST isn't provided, and we're on a nonstandard port + request = HttpRequest() + request.META = { + "SERVER_NAME": "internal.com", + "SERVER_PORT": 8042, + } + self.assertEqual(request.get_host(), "internal.com:8042") + + # Poisoned host headers are rejected as suspicious + legit_hosts = [ + "example.com", + "example.com:80", + "12.34.56.78", + "12.34.56.78:443", + "[2001:19f0:feee::dead:beef:cafe]", + "[2001:19f0:feee::dead:beef:cafe]:8080", + "xn--4ca9at.com", # Punycode for öäü.com + ] + + for host in legit_hosts: + request = HttpRequest() + request.META = { + "HTTP_HOST": host, + } + request.get_host() + + for host in self.poisoned_hosts: + with self.assertRaises(DisallowedHost): + request = HttpRequest() + request.META = { + "HTTP_HOST": host, + } + request.get_host() + + @override_settings(USE_X_FORWARDED_PORT=False) + def test_get_port(self): + request = HttpRequest() + request.META = { + "SERVER_PORT": "8080", + "HTTP_X_FORWARDED_PORT": "80", + } + # Shouldn't use the X-Forwarded-Port header + self.assertEqual(request.get_port(), "8080") + + request = HttpRequest() + request.META = { + "SERVER_PORT": "8080", + } + self.assertEqual(request.get_port(), "8080") + + @override_settings(USE_X_FORWARDED_PORT=True) + def test_get_port_with_x_forwarded_port(self): + request = HttpRequest() + request.META = { + "SERVER_PORT": "8080", + "HTTP_X_FORWARDED_PORT": "80", + } + # Should use the X-Forwarded-Port header + self.assertEqual(request.get_port(), "80") + + request = HttpRequest() + request.META = { + "SERVER_PORT": "8080", + } + self.assertEqual(request.get_port(), "8080") + + @override_settings(DEBUG=True, ALLOWED_HOSTS=[]) + def test_host_validation_in_debug_mode(self): + """ + If ALLOWED_HOSTS is empty and DEBUG is True, variants of localhost are + allowed. 
+ """ + valid_hosts = ["localhost", "subdomain.localhost", "127.0.0.1", "[::1]"] + for host in valid_hosts: + request = HttpRequest() + request.META = {"HTTP_HOST": host} + self.assertEqual(request.get_host(), host) + + # Other hostnames raise a DisallowedHost. + with self.assertRaises(DisallowedHost): + request = HttpRequest() + request.META = {"HTTP_HOST": "example.com"} + request.get_host() + + @override_settings(ALLOWED_HOSTS=[]) + def test_get_host_suggestion_of_allowed_host(self): + """ + get_host() makes helpful suggestions if a valid-looking host is not in + ALLOWED_HOSTS. + """ + msg_invalid_host = "Invalid HTTP_HOST header: %r." + msg_suggestion = msg_invalid_host + " You may need to add %r to ALLOWED_HOSTS." + msg_suggestion2 = ( + msg_invalid_host + + " The domain name provided is not valid according to RFC 1034/1035" + ) + + for host in [ # Valid-looking hosts + "example.com", + "12.34.56.78", + "[2001:19f0:feee::dead:beef:cafe]", + "xn--4ca9at.com", # Punycode for öäü.com + ]: + request = HttpRequest() + request.META = {"HTTP_HOST": host} + with self.assertRaisesMessage( + DisallowedHost, msg_suggestion % (host, host) + ): + request.get_host() + + for domain, port in [ # Valid-looking hosts with a port number + ("example.com", 80), + ("12.34.56.78", 443), + ("[2001:19f0:feee::dead:beef:cafe]", 8080), + ]: + host = "%s:%s" % (domain, port) + request = HttpRequest() + request.META = {"HTTP_HOST": host} + with self.assertRaisesMessage( + DisallowedHost, msg_suggestion % (host, domain) + ): + request.get_host() + + for host in self.poisoned_hosts: + request = HttpRequest() + request.META = {"HTTP_HOST": host} + with self.assertRaisesMessage(DisallowedHost, msg_invalid_host % host): + request.get_host() + + request = HttpRequest() + request.META = {"HTTP_HOST": "invalid_hostname.com"} + with self.assertRaisesMessage( + DisallowedHost, msg_suggestion2 % "invalid_hostname.com" + ): + request.get_host() + + def 
test_split_domain_port_removes_trailing_dot(self): + domain, port = split_domain_port("example.com.:8080") + self.assertEqual(domain, "example.com") + self.assertEqual(port, "8080") + + +class BuildAbsoluteURITests(SimpleTestCase): + factory = RequestFactory() + + def test_absolute_url(self): + request = HttpRequest() + url = "https://www.example.com/asdf" + self.assertEqual(request.build_absolute_uri(location=url), url) + + def test_host_retrieval(self): + request = HttpRequest() + request.get_host = lambda: "www.example.com" + request.path = "" + self.assertEqual( + request.build_absolute_uri(location="/path/with:colons"), + "http://www.example.com/path/with:colons", + ) + + def test_request_path_begins_with_two_slashes(self): + # //// creates a request with a path beginning with // + request = self.factory.get("////absolute-uri") + tests = ( + # location isn't provided + (None, "http://testserver//absolute-uri"), + # An absolute URL + ("http://example.com/?foo=bar", "http://example.com/?foo=bar"), + # A schema-relative URL + ("//example.com/?foo=bar", "http://example.com/?foo=bar"), + # Relative URLs + ("/foo/bar/", "http://testserver/foo/bar/"), + ("/foo/./bar/", "http://testserver/foo/bar/"), + ("/foo/../bar/", "http://testserver/bar/"), + ("///foo/bar/", "http://testserver/foo/bar/"), + ) + for location, expected_url in tests: + with self.subTest(location=location): + self.assertEqual( + request.build_absolute_uri(location=location), expected_url + ) + + +class RequestHeadersTests(SimpleTestCase): + ENVIRON = { + # Non-headers are ignored. + "PATH_INFO": "/somepath/", + "REQUEST_METHOD": "get", + "wsgi.input": BytesIO(b""), + "SERVER_NAME": "internal.com", + "SERVER_PORT": 80, + # These non-HTTP prefixed headers are included. + "CONTENT_TYPE": "text/html", + "CONTENT_LENGTH": "100", + # All HTTP-prefixed headers are included. 
+ "HTTP_ACCEPT": "*", + "HTTP_HOST": "example.com", + "HTTP_USER_AGENT": "python-requests/1.2.0", + } + + def test_base_request_headers(self): + request = HttpRequest() + request.META = self.ENVIRON + self.assertEqual( + dict(request.headers), + { + "Content-Type": "text/html", + "Content-Length": "100", + "Accept": "*", + "Host": "example.com", + "User-Agent": "python-requests/1.2.0", + }, + ) + + def test_wsgi_request_headers(self): + request = WSGIRequest(self.ENVIRON) + self.assertEqual( + dict(request.headers), + { + "Content-Type": "text/html", + "Content-Length": "100", + "Accept": "*", + "Host": "example.com", + "User-Agent": "python-requests/1.2.0", + }, + ) + + def test_wsgi_request_headers_getitem(self): + request = WSGIRequest(self.ENVIRON) + self.assertEqual(request.headers["User-Agent"], "python-requests/1.2.0") + self.assertEqual(request.headers["user-agent"], "python-requests/1.2.0") + self.assertEqual(request.headers["user_agent"], "python-requests/1.2.0") + self.assertEqual(request.headers["Content-Type"], "text/html") + self.assertEqual(request.headers["Content-Length"], "100") + + def test_wsgi_request_headers_get(self): + request = WSGIRequest(self.ENVIRON) + self.assertEqual(request.headers.get("User-Agent"), "python-requests/1.2.0") + self.assertEqual(request.headers.get("user-agent"), "python-requests/1.2.0") + self.assertEqual(request.headers.get("Content-Type"), "text/html") + self.assertEqual(request.headers.get("Content-Length"), "100") + + +class HttpHeadersTests(SimpleTestCase): + def test_basic(self): + environ = { + "CONTENT_TYPE": "text/html", + "CONTENT_LENGTH": "100", + "HTTP_HOST": "example.com", + } + headers = HttpHeaders(environ) + self.assertEqual(sorted(headers), ["Content-Length", "Content-Type", "Host"]) + self.assertEqual( + headers, + { + "Content-Type": "text/html", + "Content-Length": "100", + "Host": "example.com", + }, + ) + + def test_parse_header_name(self): + tests = ( + ("PATH_INFO", None), + ("HTTP_ACCEPT", 
"Accept"), + ("HTTP_USER_AGENT", "User-Agent"), + ("HTTP_X_FORWARDED_PROTO", "X-Forwarded-Proto"), + ("CONTENT_TYPE", "Content-Type"), + ("CONTENT_LENGTH", "Content-Length"), + ) + for header, expected in tests: + with self.subTest(header=header): + self.assertEqual(HttpHeaders.parse_header_name(header), expected) diff --git a/testbed/django__django/tests/requirements/mysql.txt b/testbed/django__django/tests/requirements/mysql.txt new file mode 100644 index 0000000000000000000000000000000000000000..27be1d86cc9c5428ae27fba31d85854d68e012b7 --- /dev/null +++ b/testbed/django__django/tests/requirements/mysql.txt @@ -0,0 +1 @@ +mysqlclient >= 1.4.3 diff --git a/testbed/django__django/tests/requirements/oracle.txt b/testbed/django__django/tests/requirements/oracle.txt new file mode 100644 index 0000000000000000000000000000000000000000..5027358943395a12a88e1c34f781216f0a1bd2f1 --- /dev/null +++ b/testbed/django__django/tests/requirements/oracle.txt @@ -0,0 +1 @@ +cx_oracle >= 7.0 diff --git a/testbed/django__django/tests/requirements/py3.txt b/testbed/django__django/tests/requirements/py3.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a7cc2f28aabedbc4df93142dbd710ef4e45eb46 --- /dev/null +++ b/testbed/django__django/tests/requirements/py3.txt @@ -0,0 +1,21 @@ +aiosmtpd +asgiref >= 3.7.0 +argon2-cffi >= 19.2.0 +bcrypt +black +docutils +geoip2; python_version < '3.12' +jinja2 >= 2.11.0 +numpy; python_version < '3.12' +Pillow >= 6.2.1; sys.platform != 'win32' or python_version < '3.12' +# pylibmc/libmemcached can't be built on Windows. 
+pylibmc; sys.platform != 'win32' +pymemcache >= 3.4.0 +pywatchman; sys.platform != 'win32' +PyYAML +redis >= 3.4.0 +selenium >= 4.8.0 +sqlparse >= 0.3.1 +tblib >= 1.5.0 +tzdata +colorama; sys.platform == 'win32' diff --git a/testbed/django__django/tests/reserved_names/tests.py b/testbed/django__django/tests/reserved_names/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..060ba22db5e989d100b47da3cfd67dc4e787d088 --- /dev/null +++ b/testbed/django__django/tests/reserved_names/tests.py @@ -0,0 +1,82 @@ +import datetime + +from django.test import TestCase + +from .models import Thing + + +class ReservedNameTests(TestCase): + def generate(self): + day1 = datetime.date(2005, 1, 1) + Thing.objects.create( + when="a", + join="b", + like="c", + drop="d", + alter="e", + having="f", + where=day1, + has_hyphen="h", + ) + day2 = datetime.date(2006, 2, 2) + Thing.objects.create( + when="h", + join="i", + like="j", + drop="k", + alter="l", + having="m", + where=day2, + ) + + def test_simple(self): + day1 = datetime.date(2005, 1, 1) + t = Thing.objects.create( + when="a", + join="b", + like="c", + drop="d", + alter="e", + having="f", + where=day1, + has_hyphen="h", + ) + self.assertEqual(t.when, "a") + + day2 = datetime.date(2006, 2, 2) + u = Thing.objects.create( + when="h", + join="i", + like="j", + drop="k", + alter="l", + having="m", + where=day2, + ) + self.assertEqual(u.when, "h") + + def test_order_by(self): + self.generate() + things = [t.when for t in Thing.objects.order_by("when")] + self.assertEqual(things, ["a", "h"]) + + def test_fields(self): + self.generate() + v = Thing.objects.get(pk="a") + self.assertEqual(v.join, "b") + self.assertEqual(v.where, datetime.date(year=2005, month=1, day=1)) + + def test_dates(self): + self.generate() + resp = Thing.objects.dates("where", "year") + self.assertEqual( + list(resp), + [ + datetime.date(2005, 1, 1), + datetime.date(2006, 1, 1), + ], + ) + + def test_month_filter(self): + self.generate() + 
self.assertEqual(Thing.objects.filter(where__month=1)[0].when, "a") diff --git a/testbed/django__django/tests/resolve_url/models.py b/testbed/django__django/tests/resolve_url/models.py new file mode 100644 index 0000000000000000000000000000000000000000..85d072a79f1a68cccf3f7b7e98d889cead0a52b4 --- /dev/null +++ b/testbed/django__django/tests/resolve_url/models.py @@ -0,0 +1,12 @@ +""" +Regression tests for the resolve_url function. +""" + +from django.db import models + + +class UnimportantThing(models.Model): + importance = models.IntegerField() + + def get_absolute_url(self): + return "/importance/%d/" % self.importance diff --git a/testbed/django__django/tests/resolve_url/tests.py b/testbed/django__django/tests/resolve_url/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..26fe783bf36a761fca4151350aac4ca48a160c6f --- /dev/null +++ b/testbed/django__django/tests/resolve_url/tests.py @@ -0,0 +1,82 @@ +from django.shortcuts import resolve_url +from django.test import SimpleTestCase, override_settings +from django.urls import NoReverseMatch, reverse_lazy + +from .models import UnimportantThing +from .urls import some_view + + +@override_settings(ROOT_URLCONF="resolve_url.urls") +class ResolveUrlTests(SimpleTestCase): + """ + Tests for the resolve_url() function. + """ + + def test_url_path(self): + """ + Passing a URL path to resolve_url() results in the same url. + """ + self.assertEqual("/something/", resolve_url("/something/")) + + def test_relative_path(self): + """ + Passing a relative URL path to resolve_url() results in the same url. + """ + self.assertEqual("../", resolve_url("../")) + self.assertEqual("../relative/", resolve_url("../relative/")) + self.assertEqual("./", resolve_url("./")) + self.assertEqual("./relative/", resolve_url("./relative/")) + + def test_full_url(self): + """ + Passing a full URL to resolve_url() results in the same url. 
+ """ + url = "http://example.com/" + self.assertEqual(url, resolve_url(url)) + + def test_model(self): + """ + Passing a model to resolve_url() results in get_absolute_url() being + called on that model instance. + """ + m = UnimportantThing(importance=1) + self.assertEqual(m.get_absolute_url(), resolve_url(m)) + + def test_view_function(self): + """ + Passing a view function to resolve_url() results in the URL path + mapping to that view name. + """ + resolved_url = resolve_url(some_view) + self.assertEqual("/some-url/", resolved_url) + + def test_lazy_reverse(self): + """ + Passing the result of reverse_lazy is resolved to a real URL + string. + """ + resolved_url = resolve_url(reverse_lazy("some-view")) + self.assertIsInstance(resolved_url, str) + self.assertEqual("/some-url/", resolved_url) + + def test_valid_view_name(self): + """ + Passing a view name to resolve_url() results in the URL path mapping + to that view. + """ + resolved_url = resolve_url("some-view") + self.assertEqual("/some-url/", resolved_url) + + def test_domain(self): + """ + Passing a domain to resolve_url() returns the same domain. + """ + self.assertEqual(resolve_url("example.com"), "example.com") + + def test_non_view_callable_raises_no_reverse_match(self): + """ + Passing a non-view callable into resolve_url() raises a + NoReverseMatch exception. 
+ """ + with self.assertRaises(NoReverseMatch): + resolve_url(lambda: "asdf") diff --git a/testbed/django__django/tests/resolve_url/urls.py b/testbed/django__django/tests/resolve_url/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..4cd4ffa03fd17b9a2dd36b19752e80bbc6017f16 --- /dev/null +++ b/testbed/django__django/tests/resolve_url/urls.py @@ -0,0 +1,10 @@ +from django.urls import path + + +def some_view(request): + pass + + +urlpatterns = [ + path("some-url/", some_view, name="some-view"), +] diff --git a/testbed/django__django/tests/responses/__init__.py b/testbed/django__django/tests/responses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/responses/test_cookie.py b/testbed/django__django/tests/responses/test_cookie.py new file mode 100644 index 0000000000000000000000000000000000000000..7e7f356deb5cdafc0e8944a571059d833246badf --- /dev/null +++ b/testbed/django__django/tests/responses/test_cookie.py @@ -0,0 +1,160 @@ +import time +from datetime import date, datetime, timedelta, timezone +from email.utils import format_datetime as format_datetime_rfc5322 +from http import cookies + +from django.http import HttpResponse +from django.test import SimpleTestCase +from django.test.utils import freeze_time +from django.utils.http import http_date + + +class SetCookieTests(SimpleTestCase): + def test_near_expiration(self): + """Cookie will expire when a near expiration time is provided.""" + response = HttpResponse() + # There's a timing weakness in this test; The expected result for + # max-age requires that there be a very slight difference between the + # evaluated expiration time and the time evaluated in set_cookie(). If + # this difference doesn't exist, the cookie time will be 1 second + # larger. The sleep guarantees that there will be a time difference. 
+ expires = datetime.now(tz=timezone.utc).replace(tzinfo=None) + timedelta( + seconds=10 + ) + time.sleep(0.001) + response.set_cookie("datetime", expires=expires) + datetime_cookie = response.cookies["datetime"] + self.assertEqual(datetime_cookie["max-age"], 10) + + def test_aware_expiration(self): + """set_cookie() accepts an aware datetime as expiration time.""" + response = HttpResponse() + expires = datetime.now(tz=timezone.utc) + timedelta(seconds=10) + time.sleep(0.001) + response.set_cookie("datetime", expires=expires) + datetime_cookie = response.cookies["datetime"] + self.assertEqual(datetime_cookie["max-age"], 10) + + def test_create_cookie_after_deleting_cookie(self): + """Setting a cookie after deletion clears the expiry date.""" + response = HttpResponse() + response.set_cookie("c", "old-value") + self.assertEqual(response.cookies["c"]["expires"], "") + response.delete_cookie("c") + self.assertEqual( + response.cookies["c"]["expires"], "Thu, 01 Jan 1970 00:00:00 GMT" + ) + response.set_cookie("c", "new-value") + self.assertEqual(response.cookies["c"]["expires"], "") + + def test_far_expiration(self): + """Cookie will expire when a distant expiration time is provided.""" + response = HttpResponse() + future_datetime = datetime( + date.today().year + 2, 1, 1, 4, 5, 6, tzinfo=timezone.utc + ) + response.set_cookie("datetime", expires=future_datetime) + datetime_cookie = response.cookies["datetime"] + self.assertIn( + datetime_cookie["expires"], + # assertIn accounts for slight time dependency (#23450) + ( + format_datetime_rfc5322(future_datetime, usegmt=True), + format_datetime_rfc5322(future_datetime.replace(second=7), usegmt=True), + ), + ) + + def test_max_age_expiration(self): + """Cookie will expire if max_age is provided.""" + response = HttpResponse() + set_cookie_time = time.time() + with freeze_time(set_cookie_time): + response.set_cookie("max_age", max_age=10) + max_age_cookie = response.cookies["max_age"] + 
self.assertEqual(max_age_cookie["max-age"], 10) + self.assertEqual(max_age_cookie["expires"], http_date(set_cookie_time + 10)) + + def test_max_age_int(self): + response = HttpResponse() + response.set_cookie("max_age", max_age=10.6) + self.assertEqual(response.cookies["max_age"]["max-age"], 10) + + def test_max_age_timedelta(self): + response = HttpResponse() + response.set_cookie("max_age", max_age=timedelta(hours=1)) + self.assertEqual(response.cookies["max_age"]["max-age"], 3600) + + def test_max_age_with_expires(self): + response = HttpResponse() + msg = "'expires' and 'max_age' can't be used together." + with self.assertRaisesMessage(ValueError, msg): + response.set_cookie( + "max_age", expires=datetime(2000, 1, 1), max_age=timedelta(hours=1) + ) + + def test_httponly_cookie(self): + response = HttpResponse() + response.set_cookie("example", httponly=True) + example_cookie = response.cookies["example"] + self.assertIn( + "; %s" % cookies.Morsel._reserved["httponly"], str(example_cookie) + ) + self.assertIs(example_cookie["httponly"], True) + + def test_unicode_cookie(self): + """HttpResponse.set_cookie() works with Unicode data.""" + response = HttpResponse() + cookie_value = "清風" + response.set_cookie("test", cookie_value) + self.assertEqual(response.cookies["test"].value, cookie_value) + + def test_samesite(self): + response = HttpResponse() + response.set_cookie("example", samesite="None") + self.assertEqual(response.cookies["example"]["samesite"], "None") + response.set_cookie("example", samesite="Lax") + self.assertEqual(response.cookies["example"]["samesite"], "Lax") + response.set_cookie("example", samesite="strict") + self.assertEqual(response.cookies["example"]["samesite"], "strict") + + def test_invalid_samesite(self): + msg = 'samesite must be "lax", "none", or "strict".' 
+ with self.assertRaisesMessage(ValueError, msg): + HttpResponse().set_cookie("example", samesite="invalid") + + +class DeleteCookieTests(SimpleTestCase): + def test_default(self): + response = HttpResponse() + response.delete_cookie("c") + cookie = response.cookies["c"] + self.assertEqual(cookie["expires"], "Thu, 01 Jan 1970 00:00:00 GMT") + self.assertEqual(cookie["max-age"], 0) + self.assertEqual(cookie["path"], "/") + self.assertEqual(cookie["secure"], "") + self.assertEqual(cookie["domain"], "") + self.assertEqual(cookie["samesite"], "") + + def test_delete_cookie_secure_prefix(self): + """ + delete_cookie() sets the secure flag if the cookie name starts with + __Host- or __Secure- (without that, browsers ignore cookies with those + prefixes). + """ + response = HttpResponse() + for prefix in ("Secure", "Host"): + with self.subTest(prefix=prefix): + cookie_name = "__%s-c" % prefix + response.delete_cookie(cookie_name) + self.assertIs(response.cookies[cookie_name]["secure"], True) + + def test_delete_cookie_secure_samesite_none(self): + # delete_cookie() sets the secure flag if samesite='none'. 
+ response = HttpResponse() + response.delete_cookie("c", samesite="none") + self.assertIs(response.cookies["c"]["secure"], True) + + def test_delete_cookie_samesite(self): + response = HttpResponse() + response.delete_cookie("c", samesite="lax") + self.assertEqual(response.cookies["c"]["samesite"], "lax") diff --git a/testbed/django__django/tests/responses/test_fileresponse.py b/testbed/django__django/tests/responses/test_fileresponse.py new file mode 100644 index 0000000000000000000000000000000000000000..d14eb82c6266f3ae14ef80647dd9c2010355e3da --- /dev/null +++ b/testbed/django__django/tests/responses/test_fileresponse.py @@ -0,0 +1,288 @@ +import io +import itertools +import os +import sys +import tempfile +from unittest import skipIf + +from django.core.files.base import ContentFile +from django.http import FileResponse +from django.test import SimpleTestCase + + +class UnseekableBytesIO(io.BytesIO): + def seekable(self): + return False + + +class FileResponseTests(SimpleTestCase): + def test_content_length_file(self): + response = FileResponse(open(__file__, "rb")) + response.close() + self.assertEqual( + response.headers["Content-Length"], str(os.path.getsize(__file__)) + ) + + def test_content_length_buffer(self): + response = FileResponse(io.BytesIO(b"binary content")) + self.assertEqual(response.headers["Content-Length"], "14") + + def test_content_length_nonzero_starting_position_file(self): + file = open(__file__, "rb") + file.seek(10) + response = FileResponse(file) + response.close() + self.assertEqual( + response.headers["Content-Length"], str(os.path.getsize(__file__) - 10) + ) + + def test_content_length_nonzero_starting_position_buffer(self): + test_tuples = ( + ("BytesIO", io.BytesIO), + ("UnseekableBytesIO", UnseekableBytesIO), + ) + for buffer_class_name, BufferClass in test_tuples: + with self.subTest(buffer_class_name=buffer_class_name): + buffer = BufferClass(b"binary content") + buffer.seek(10) + response = FileResponse(buffer) + 
self.assertEqual(response.headers["Content-Length"], "4") + + def test_content_length_nonzero_starting_position_file_seekable_no_tell(self): + class TestFile: + def __init__(self, path, *args, **kwargs): + self._file = open(path, *args, **kwargs) + + def read(self, n_bytes=-1): + return self._file.read(n_bytes) + + def seek(self, offset, whence=io.SEEK_SET): + return self._file.seek(offset, whence) + + def seekable(self): + return True + + @property + def name(self): + return self._file.name + + def close(self): + if self._file: + self._file.close() + self._file = None + + def __enter__(self): + return self + + def __exit__(self, e_type, e_val, e_tb): + self.close() + + file = TestFile(__file__, "rb") + file.seek(10) + response = FileResponse(file) + response.close() + self.assertEqual( + response.headers["Content-Length"], str(os.path.getsize(__file__) - 10) + ) + + def test_content_type_file(self): + response = FileResponse(open(__file__, "rb")) + response.close() + self.assertIn(response.headers["Content-Type"], ["text/x-python", "text/plain"]) + + def test_content_type_buffer(self): + response = FileResponse(io.BytesIO(b"binary content")) + self.assertEqual(response.headers["Content-Type"], "application/octet-stream") + + def test_content_type_buffer_explicit(self): + response = FileResponse( + io.BytesIO(b"binary content"), content_type="video/webm" + ) + self.assertEqual(response.headers["Content-Type"], "video/webm") + + def test_content_type_buffer_explicit_default(self): + response = FileResponse( + io.BytesIO(b"binary content"), content_type="text/html; charset=utf-8" + ) + self.assertEqual(response.headers["Content-Type"], "text/html; charset=utf-8") + + def test_content_type_buffer_named(self): + test_tuples = ( + (__file__, ["text/x-python", "text/plain"]), + (__file__ + "nosuchfile", ["application/octet-stream"]), + ("test_fileresponse.py", ["text/x-python", "text/plain"]), + ("test_fileresponse.pynosuchfile", ["application/octet-stream"]), + ) + for 
filename, content_types in test_tuples: + with self.subTest(filename=filename): + buffer = io.BytesIO(b"binary content") + buffer.name = filename + response = FileResponse(buffer) + self.assertIn(response.headers["Content-Type"], content_types) + + def test_content_disposition_file(self): + filenames = ( + ("", "test_fileresponse.py"), + ("custom_name.py", "custom_name.py"), + ) + dispositions = ( + (False, "inline"), + (True, "attachment"), + ) + for (filename, header_filename), ( + as_attachment, + header_disposition, + ) in itertools.product(filenames, dispositions): + with self.subTest(filename=filename, disposition=header_disposition): + response = FileResponse( + open(__file__, "rb"), filename=filename, as_attachment=as_attachment + ) + response.close() + self.assertEqual( + response.headers["Content-Disposition"], + '%s; filename="%s"' % (header_disposition, header_filename), + ) + + def test_content_disposition_escaping(self): + # fmt: off + tests = [ + ( + 'multi-part-one";\" dummy".txt', + r"multi-part-one\";\" dummy\".txt" + ), + ] + # fmt: on + # Non-escape sequence backslashes are path segments on Windows, and are + # eliminated by an os.path.basename() check in FileResponse. 
+ if sys.platform != "win32": + # fmt: off + tests += [ + ( + 'multi-part-one\\";\" dummy".txt', + r"multi-part-one\\\";\" dummy\".txt" + ), + ( + 'multi-part-one\\";\\\" dummy".txt', + r"multi-part-one\\\";\\\" dummy\".txt" + ) + ] + # fmt: on + for filename, escaped in tests: + with self.subTest(filename=filename, escaped=escaped): + response = FileResponse( + io.BytesIO(b"binary content"), filename=filename, as_attachment=True + ) + response.close() + self.assertEqual( + response.headers["Content-Disposition"], + f'attachment; filename="{escaped}"', + ) + + def test_content_disposition_buffer(self): + response = FileResponse(io.BytesIO(b"binary content")) + self.assertFalse(response.has_header("Content-Disposition")) + + def test_content_disposition_buffer_attachment(self): + response = FileResponse(io.BytesIO(b"binary content"), as_attachment=True) + self.assertEqual(response.headers["Content-Disposition"], "attachment") + + def test_content_disposition_buffer_explicit_filename(self): + dispositions = ( + (False, "inline"), + (True, "attachment"), + ) + for as_attachment, header_disposition in dispositions: + response = FileResponse( + io.BytesIO(b"binary content"), + as_attachment=as_attachment, + filename="custom_name.py", + ) + self.assertEqual( + response.headers["Content-Disposition"], + '%s; filename="custom_name.py"' % header_disposition, + ) + + def test_response_buffer(self): + response = FileResponse(io.BytesIO(b"binary content")) + self.assertEqual(list(response), [b"binary content"]) + + def test_response_nonzero_starting_position(self): + test_tuples = ( + ("BytesIO", io.BytesIO), + ("UnseekableBytesIO", UnseekableBytesIO), + ) + for buffer_class_name, BufferClass in test_tuples: + with self.subTest(buffer_class_name=buffer_class_name): + buffer = BufferClass(b"binary content") + buffer.seek(10) + response = FileResponse(buffer) + self.assertEqual(list(response), [b"tent"]) + + def test_buffer_explicit_absolute_filename(self): + """ + Headers are 
set correctly with a buffer when an absolute filename is + provided. + """ + response = FileResponse(io.BytesIO(b"binary content"), filename=__file__) + self.assertEqual(response.headers["Content-Length"], "14") + self.assertEqual( + response.headers["Content-Disposition"], + 'inline; filename="test_fileresponse.py"', + ) + + @skipIf(sys.platform == "win32", "Named pipes are Unix-only.") + def test_file_from_named_pipe_response(self): + with tempfile.TemporaryDirectory() as temp_dir: + pipe_file = os.path.join(temp_dir, "named_pipe") + os.mkfifo(pipe_file) + pipe_for_read = os.open(pipe_file, os.O_RDONLY | os.O_NONBLOCK) + with open(pipe_file, "wb") as pipe_for_write: + pipe_for_write.write(b"binary content") + + response = FileResponse(os.fdopen(pipe_for_read, mode="rb")) + response_content = list(response) + response.close() + self.assertEqual(response_content, [b"binary content"]) + self.assertFalse(response.has_header("Content-Length")) + + def test_compressed_response(self): + """ + If compressed responses are served with the uncompressed Content-Type + and a compression Content-Encoding, browsers might automatically + uncompress the file, which is most probably not wanted. 
+ """ + test_tuples = ( + (".tar.gz", "application/gzip"), + (".tar.br", "application/x-brotli"), + (".tar.bz2", "application/x-bzip"), + (".tar.xz", "application/x-xz"), + (".tar.Z", "application/x-compress"), + ) + for extension, mimetype in test_tuples: + with self.subTest(ext=extension): + with tempfile.NamedTemporaryFile(suffix=extension) as tmp: + response = FileResponse(tmp) + self.assertEqual(response.headers["Content-Type"], mimetype) + self.assertFalse(response.has_header("Content-Encoding")) + + def test_unicode_attachment(self): + response = FileResponse( + ContentFile(b"binary content", name="祝您平安.odt"), + as_attachment=True, + content_type="application/vnd.oasis.opendocument.text", + ) + self.assertEqual( + response.headers["Content-Type"], + "application/vnd.oasis.opendocument.text", + ) + self.assertEqual( + response.headers["Content-Disposition"], + "attachment; filename*=utf-8''%E7%A5%9D%E6%82%A8%E5%B9%B3%E5%AE%89.odt", + ) + + def test_repr(self): + response = FileResponse(io.BytesIO(b"binary content")) + self.assertEqual( + repr(response), + '', + ) diff --git a/testbed/django__django/tests/responses/tests.py b/testbed/django__django/tests/responses/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..b16cb533648ef5c9bf7e02138590728ccc2a479d --- /dev/null +++ b/testbed/django__django/tests/responses/tests.py @@ -0,0 +1,180 @@ +import io + +from django.conf import settings +from django.core.cache import cache +from django.http import HttpResponse +from django.http.response import HttpResponseBase +from django.test import SimpleTestCase + +UTF8 = "utf-8" +ISO88591 = "iso-8859-1" + + +class HttpResponseBaseTests(SimpleTestCase): + def test_closed(self): + r = HttpResponseBase() + self.assertIs(r.closed, False) + + r.close() + self.assertIs(r.closed, True) + + def test_write(self): + r = HttpResponseBase() + self.assertIs(r.writable(), False) + + with self.assertRaisesMessage( + OSError, "This HttpResponseBase instance is 
not writable" + ): + r.write("asdf") + with self.assertRaisesMessage( + OSError, "This HttpResponseBase instance is not writable" + ): + r.writelines(["asdf\n", "qwer\n"]) + + def test_tell(self): + r = HttpResponseBase() + with self.assertRaisesMessage( + OSError, "This HttpResponseBase instance cannot tell its position" + ): + r.tell() + + def test_setdefault(self): + """ + HttpResponseBase.setdefault() should not change an existing header + and should be case insensitive. + """ + r = HttpResponseBase() + + r.headers["Header"] = "Value" + r.setdefault("header", "changed") + self.assertEqual(r.headers["header"], "Value") + + r.setdefault("x-header", "DefaultValue") + self.assertEqual(r.headers["X-Header"], "DefaultValue") + + def test_charset_setter(self): + r = HttpResponseBase() + r.charset = "utf-8" + self.assertEqual(r.charset, "utf-8") + + def test_reason_phrase_setter(self): + r = HttpResponseBase() + r.reason_phrase = "test" + self.assertEqual(r.reason_phrase, "test") + + +class HttpResponseTests(SimpleTestCase): + def test_status_code(self): + resp = HttpResponse(status=503) + self.assertEqual(resp.status_code, 503) + self.assertEqual(resp.reason_phrase, "Service Unavailable") + + def test_change_status_code(self): + resp = HttpResponse() + resp.status_code = 503 + self.assertEqual(resp.status_code, 503) + self.assertEqual(resp.reason_phrase, "Service Unavailable") + + def test_valid_status_code_string(self): + resp = HttpResponse(status="100") + self.assertEqual(resp.status_code, 100) + resp = HttpResponse(status="404") + self.assertEqual(resp.status_code, 404) + resp = HttpResponse(status="599") + self.assertEqual(resp.status_code, 599) + + def test_invalid_status_code(self): + must_be_integer = "HTTP status code must be an integer." + must_be_integer_in_range = ( + "HTTP status code must be an integer from 100 to 599." 
+ ) + with self.assertRaisesMessage(TypeError, must_be_integer): + HttpResponse(status=object()) + with self.assertRaisesMessage(TypeError, must_be_integer): + HttpResponse(status="J'attendrai") + with self.assertRaisesMessage(ValueError, must_be_integer_in_range): + HttpResponse(status=99) + with self.assertRaisesMessage(ValueError, must_be_integer_in_range): + HttpResponse(status=600) + + def test_reason_phrase(self): + reason = "I'm an anarchist coffee pot on crack." + resp = HttpResponse(status=419, reason=reason) + self.assertEqual(resp.status_code, 419) + self.assertEqual(resp.reason_phrase, reason) + + def test_charset_detection(self): + """HttpResponse should parse charset from content_type.""" + response = HttpResponse("ok") + self.assertEqual(response.charset, settings.DEFAULT_CHARSET) + + response = HttpResponse(charset=ISO88591) + self.assertEqual(response.charset, ISO88591) + self.assertEqual( + response.headers["Content-Type"], "text/html; charset=%s" % ISO88591 + ) + + response = HttpResponse( + content_type="text/plain; charset=%s" % UTF8, charset=ISO88591 + ) + self.assertEqual(response.charset, ISO88591) + + response = HttpResponse(content_type="text/plain; charset=%s" % ISO88591) + self.assertEqual(response.charset, ISO88591) + + response = HttpResponse(content_type='text/plain; charset="%s"' % ISO88591) + self.assertEqual(response.charset, ISO88591) + + response = HttpResponse(content_type="text/plain; charset=") + self.assertEqual(response.charset, settings.DEFAULT_CHARSET) + + response = HttpResponse(content_type="text/plain") + self.assertEqual(response.charset, settings.DEFAULT_CHARSET) + + def test_response_content_charset(self): + """HttpResponse should encode based on charset.""" + content = "Café :)" + utf8_content = content.encode(UTF8) + iso_content = content.encode(ISO88591) + + response = HttpResponse(utf8_content) + self.assertContains(response, utf8_content) + + response = HttpResponse( + iso_content, content_type="text/plain; 
charset=%s" % ISO88591 + ) + self.assertContains(response, iso_content) + + response = HttpResponse(iso_content) + self.assertContains(response, iso_content) + + response = HttpResponse(iso_content, content_type="text/plain") + self.assertContains(response, iso_content) + + def test_repr(self): + response = HttpResponse(content="Café :)".encode(UTF8), status=201) + expected = '' + self.assertEqual(repr(response), expected) + + def test_repr_no_content_type(self): + response = HttpResponse(status=204) + del response.headers["Content-Type"] + self.assertEqual(repr(response), "") + + def test_wrap_textiowrapper(self): + content = "Café :)" + r = HttpResponse() + with io.TextIOWrapper(r, UTF8) as buf: + buf.write(content) + self.assertEqual(r.content, content.encode(UTF8)) + + def test_generator_cache(self): + generator = (str(i) for i in range(10)) + response = HttpResponse(content=generator) + self.assertEqual(response.content, b"0123456789") + with self.assertRaises(StopIteration): + next(generator) + + cache.set("my-response-key", response) + response = cache.get("my-response-key") + self.assertEqual(response.content, b"0123456789") diff --git a/testbed/django__django/tests/reverse_lookup/__init__.py b/testbed/django__django/tests/reverse_lookup/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/reverse_lookup/models.py b/testbed/django__django/tests/reverse_lookup/models.py new file mode 100644 index 0000000000000000000000000000000000000000..7c249b4fda6f953fd9737cfebf89475d4d93a1ba --- /dev/null +++ b/testbed/django__django/tests/reverse_lookup/models.py @@ -0,0 +1,24 @@ +""" +Reverse lookups + +This demonstrates the reverse lookup features of the database API. 
+""" + +from django.db import models + + +class User(models.Model): + name = models.CharField(max_length=200) + + +class Poll(models.Model): + question = models.CharField(max_length=200) + creator = models.ForeignKey(User, models.CASCADE) + + +class Choice(models.Model): + name = models.CharField(max_length=100) + poll = models.ForeignKey(Poll, models.CASCADE, related_name="poll_choice") + related_poll = models.ForeignKey( + Poll, models.CASCADE, related_name="related_choice" + ) diff --git a/testbed/django__django/tests/reverse_lookup/tests.py b/testbed/django__django/tests/reverse_lookup/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..2417a1ed62a85f910d707a20db4db7b6483a9b34 --- /dev/null +++ b/testbed/django__django/tests/reverse_lookup/tests.py @@ -0,0 +1,45 @@ +from django.core.exceptions import FieldError +from django.test import TestCase + +from .models import Choice, Poll, User + + +class ReverseLookupTests(TestCase): + @classmethod + def setUpTestData(cls): + john = User.objects.create(name="John Doe") + jim = User.objects.create(name="Jim Bo") + first_poll = Poll.objects.create( + question="What's the first question?", creator=john + ) + second_poll = Poll.objects.create( + question="What's the second question?", creator=jim + ) + Choice.objects.create( + poll=first_poll, related_poll=second_poll, name="This is the answer." 
+ ) + + def test_reverse_by_field(self): + u1 = User.objects.get(poll__question__exact="What's the first question?") + self.assertEqual(u1.name, "John Doe") + + u2 = User.objects.get(poll__question__exact="What's the second question?") + self.assertEqual(u2.name, "Jim Bo") + + def test_reverse_by_related_name(self): + p1 = Poll.objects.get(poll_choice__name__exact="This is the answer.") + self.assertEqual(p1.question, "What's the first question?") + + p2 = Poll.objects.get(related_choice__name__exact="This is the answer.") + self.assertEqual(p2.question, "What's the second question?") + + def test_reverse_field_name_disallowed(self): + """ + If a related_name is given you can't use the field name instead + """ + msg = ( + "Cannot resolve keyword 'choice' into field. Choices are: " + "creator, creator_id, id, poll_choice, question, related_choice" + ) + with self.assertRaisesMessage(FieldError, msg): + Poll.objects.get(choice__name__exact="This is the answer") diff --git a/testbed/django__django/tests/runtests.py b/testbed/django__django/tests/runtests.py new file mode 100644 index 0000000000000000000000000000000000000000..a3550c553c83a377834b6ee3e39e6ee202ed1a19 --- /dev/null +++ b/testbed/django__django/tests/runtests.py @@ -0,0 +1,791 @@ +#!/usr/bin/env python +import argparse +import atexit +import copy +import gc +import multiprocessing +import os +import shutil +import socket +import subprocess +import sys +import tempfile +import warnings +from pathlib import Path + +try: + import django +except ImportError as e: + raise RuntimeError( + "Django module not found, reference tests/README.rst for instructions." 
+ ) from e +else: + from django.apps import apps + from django.conf import settings + from django.core.exceptions import ImproperlyConfigured + from django.db import connection, connections + from django.test import TestCase, TransactionTestCase + from django.test.runner import get_max_test_processes, parallel_type + from django.test.selenium import SeleniumTestCaseBase + from django.test.utils import NullTimeKeeper, TimeKeeper, get_runner + from django.utils.deprecation import ( + RemovedInDjango51Warning, + RemovedInDjango60Warning, + ) + from django.utils.log import DEFAULT_LOGGING + +try: + import MySQLdb +except ImportError: + pass +else: + # Ignore informational warnings from QuerySet.explain(). + warnings.filterwarnings("ignore", r"\(1003, *", category=MySQLdb.Warning) + +# Make deprecation warnings errors to ensure no usage of deprecated features. +warnings.simplefilter("error", RemovedInDjango60Warning) +warnings.simplefilter("error", RemovedInDjango51Warning) +# Make resource and runtime warning errors to ensure no usage of error prone +# patterns. +warnings.simplefilter("error", ResourceWarning) +warnings.simplefilter("error", RuntimeWarning) +# Ignore known warnings in test dependencies. +warnings.filterwarnings( + "ignore", "'U' mode is deprecated", DeprecationWarning, module="docutils.io" +) + +# Reduce garbage collection frequency to improve performance. Since CPython +# uses refcounting, garbage collection only collects objects with cyclic +# references, which are a minority, so the garbage collection threshold can be +# larger than the default threshold of 700 allocations + deallocations without +# much increase in memory usage. +gc.set_threshold(100_000) + +RUNTESTS_DIR = os.path.abspath(os.path.dirname(__file__)) + +TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, "templates") + +# Create a specific subdirectory for the duration of the test suite. 
+TMPDIR = tempfile.mkdtemp(prefix="django_") +# Set the TMPDIR environment variable in addition to tempfile.tempdir +# so that children processes inherit it. +tempfile.tempdir = os.environ["TMPDIR"] = TMPDIR + +# Removing the temporary TMPDIR. +atexit.register(shutil.rmtree, TMPDIR) + + +# This is a dict mapping RUNTESTS_DIR subdirectory to subdirectories of that +# directory to skip when searching for test modules. +SUBDIRS_TO_SKIP = { + "": {"import_error_package", "test_runner_apps"}, + "gis_tests": {"data"}, +} + +ALWAYS_INSTALLED_APPS = [ + "django.contrib.contenttypes", + "django.contrib.auth", + "django.contrib.sites", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.admin.apps.SimpleAdminConfig", + "django.contrib.staticfiles", +] + +ALWAYS_MIDDLEWARE = [ + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", +] + +# Need to add the associated contrib app to INSTALLED_APPS in some cases to +# avoid "RuntimeError: Model class X doesn't declare an explicit app_label +# and isn't in an application in INSTALLED_APPS." +CONTRIB_TESTS_TO_APPS = { + "deprecation": ["django.contrib.flatpages", "django.contrib.redirects"], + "flatpages_tests": ["django.contrib.flatpages"], + "redirects_tests": ["django.contrib.redirects"], +} + + +def get_test_modules(gis_enabled): + """ + Scan the tests directory and yield the names of all test modules. + + The yielded names have either one dotted part like "test_runner" or, in + the case of GIS tests, two dotted parts like "gis_tests.gdal_tests". 
+ """ + discovery_dirs = [""] + if gis_enabled: + # GIS tests are in nested apps + discovery_dirs.append("gis_tests") + else: + SUBDIRS_TO_SKIP[""].add("gis_tests") + + for dirname in discovery_dirs: + dirpath = os.path.join(RUNTESTS_DIR, dirname) + subdirs_to_skip = SUBDIRS_TO_SKIP[dirname] + with os.scandir(dirpath) as entries: + for f in entries: + if ( + "." in f.name + or os.path.basename(f.name) in subdirs_to_skip + or f.is_file() + or not os.path.exists(os.path.join(f.path, "__init__.py")) + ): + continue + test_module = f.name + if dirname: + test_module = dirname + "." + test_module + yield test_module + + +def get_label_module(label): + """Return the top-level module part for a test label.""" + path = Path(label) + if len(path.parts) == 1: + # Interpret the label as a dotted module name. + return label.split(".")[0] + + # Otherwise, interpret the label as a path. Check existence first to + # provide a better error message than relative_to() if it doesn't exist. + if not path.exists(): + raise RuntimeError(f"Test label path {label} does not exist") + path = path.resolve() + rel_path = path.relative_to(RUNTESTS_DIR) + return rel_path.parts[0] + + +def get_filtered_test_modules(start_at, start_after, gis_enabled, test_labels=None): + if test_labels is None: + test_labels = [] + # Reduce each test label to just the top-level module part. + label_modules = set() + for label in test_labels: + test_module = get_label_module(label) + label_modules.add(test_module) + + # It would be nice to put this validation earlier but it must come after + # django.setup() so that connection.features.gis_enabled can be accessed. + if "gis_tests" in label_modules and not gis_enabled: + print("Aborting: A GIS database backend is required to run gis_tests.") + sys.exit(1) + + def _module_match_label(module_name, label): + # Exact or ancestor match. 
+ return module_name == label or module_name.startswith(label + ".") + + start_label = start_at or start_after + for test_module in get_test_modules(gis_enabled): + if start_label: + if not _module_match_label(test_module, start_label): + continue + start_label = "" + if not start_at: + assert start_after + # Skip the current one before starting. + continue + # If the module (or an ancestor) was named on the command line, or + # no modules were named (i.e., run all), include the test module. + if not test_labels or any( + _module_match_label(test_module, label_module) + for label_module in label_modules + ): + yield test_module + + +def setup_collect_tests(start_at, start_after, test_labels=None): + state = { + "INSTALLED_APPS": settings.INSTALLED_APPS, + "ROOT_URLCONF": getattr(settings, "ROOT_URLCONF", ""), + "TEMPLATES": settings.TEMPLATES, + "LANGUAGE_CODE": settings.LANGUAGE_CODE, + "STATIC_URL": settings.STATIC_URL, + "STATIC_ROOT": settings.STATIC_ROOT, + "MIDDLEWARE": settings.MIDDLEWARE, + } + + # Redirect some settings for the duration of these tests. + settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS + settings.ROOT_URLCONF = "urls" + settings.STATIC_URL = "static/" + settings.STATIC_ROOT = os.path.join(TMPDIR, "static") + settings.TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [TEMPLATE_DIR], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + } + ] + settings.LANGUAGE_CODE = "en" + settings.SITE_ID = 1 + settings.MIDDLEWARE = ALWAYS_MIDDLEWARE + settings.MIGRATION_MODULES = { + # This lets us skip creating migrations for the test models as many of + # them depend on one of the following contrib applications. 
+ "auth": None, + "contenttypes": None, + "sessions": None, + } + log_config = copy.deepcopy(DEFAULT_LOGGING) + # Filter out non-error logging so we don't have to capture it in lots of + # tests. + log_config["loggers"]["django"]["level"] = "ERROR" + settings.LOGGING = log_config + settings.SILENCED_SYSTEM_CHECKS = [ + "fields.W342", # ForeignKey(unique=True) -> OneToOneField + # django.contrib.postgres.fields.CICharField deprecated. + "fields.W905", + "postgres.W004", + # django.contrib.postgres.fields.CIEmailField deprecated. + "fields.W906", + # django.contrib.postgres.fields.CITextField deprecated. + "fields.W907", + ] + + # Load all the ALWAYS_INSTALLED_APPS. + django.setup() + + # This flag must be evaluated after django.setup() because otherwise it can + # raise AppRegistryNotReady when running gis_tests in isolation on some + # backends (e.g. PostGIS). + gis_enabled = connection.features.gis_enabled + + test_modules = list( + get_filtered_test_modules( + start_at, + start_after, + gis_enabled, + test_labels=test_labels, + ) + ) + return test_modules, state + + +def teardown_collect_tests(state): + # Restore the old settings. + for key, value in state.items(): + setattr(settings, key, value) + + +def get_installed(): + return [app_config.name for app_config in apps.get_app_configs()] + + +# This function should be called only after calling django.setup(), +# since it calls connection.features.gis_enabled. +def get_apps_to_install(test_modules): + for test_module in test_modules: + if test_module in CONTRIB_TESTS_TO_APPS: + yield from CONTRIB_TESTS_TO_APPS[test_module] + yield test_module + + # Add contrib.gis to INSTALLED_APPS if needed (rather than requiring + # @override_settings(INSTALLED_APPS=...) on all test cases. 
+ if connection.features.gis_enabled: + yield "django.contrib.gis" + + +def setup_run_tests(verbosity, start_at, start_after, test_labels=None): + test_modules, state = setup_collect_tests( + start_at, start_after, test_labels=test_labels + ) + + installed_apps = set(get_installed()) + for app in get_apps_to_install(test_modules): + if app in installed_apps: + continue + if verbosity >= 2: + print(f"Importing application {app}") + settings.INSTALLED_APPS.append(app) + installed_apps.add(app) + + apps.set_installed_apps(settings.INSTALLED_APPS) + + # Force declaring available_apps in TransactionTestCase for faster tests. + def no_available_apps(self): + raise Exception( + "Please define available_apps in TransactionTestCase and its subclasses." + ) + + TransactionTestCase.available_apps = property(no_available_apps) + TestCase.available_apps = None + + # Set an environment variable that other code may consult to see if + # Django's own test suite is running. + os.environ["RUNNING_DJANGOS_TEST_SUITE"] = "true" + + test_labels = test_labels or test_modules + return test_labels, state + + +def teardown_run_tests(state): + teardown_collect_tests(state) + # Discard the multiprocessing.util finalizer that tries to remove a + # temporary directory that's already removed by this script's + # atexit.register(shutil.rmtree, TMPDIR) handler. Prevents + # FileNotFoundError at the end of a test run (#27890). + from multiprocessing.util import _finalizer_registry + + _finalizer_registry.pop((-100, 0), None) + del os.environ["RUNNING_DJANGOS_TEST_SUITE"] + + +class ActionSelenium(argparse.Action): + """ + Validate the comma-separated list of requested browsers. 
+ """ + + def __call__(self, parser, namespace, values, option_string=None): + try: + import selenium # NOQA + except ImportError as e: + raise ImproperlyConfigured(f"Error loading selenium module: {e}") + browsers = values.split(",") + for browser in browsers: + try: + SeleniumTestCaseBase.import_webdriver(browser) + except ImportError: + raise argparse.ArgumentError( + self, "Selenium browser specification '%s' is not valid." % browser + ) + setattr(namespace, self.dest, browsers) + + +def django_tests( + verbosity, + interactive, + failfast, + keepdb, + reverse, + test_labels, + debug_sql, + parallel, + tags, + exclude_tags, + test_name_patterns, + start_at, + start_after, + pdb, + buffer, + timing, + shuffle, +): + if parallel in {0, "auto"}: + max_parallel = get_max_test_processes() + else: + max_parallel = parallel + + if verbosity >= 1: + msg = "Testing against Django installed in '%s'" % os.path.dirname( + django.__file__ + ) + if max_parallel > 1: + msg += " with up to %d processes" % max_parallel + print(msg) + + process_setup_args = (verbosity, start_at, start_after, test_labels) + test_labels, state = setup_run_tests(*process_setup_args) + # Run the test suite, including the extra validation tests. + if not hasattr(settings, "TEST_RUNNER"): + settings.TEST_RUNNER = "django.test.runner.DiscoverRunner" + + if parallel in {0, "auto"}: + # This doesn't work before django.setup() on some databases. 
+ if all(conn.features.can_clone_databases for conn in connections.all()): + parallel = max_parallel + else: + parallel = 1 + + TestRunner = get_runner(settings) + TestRunner.parallel_test_suite.process_setup = setup_run_tests + TestRunner.parallel_test_suite.process_setup_args = process_setup_args + test_runner = TestRunner( + verbosity=verbosity, + interactive=interactive, + failfast=failfast, + keepdb=keepdb, + reverse=reverse, + debug_sql=debug_sql, + parallel=parallel, + tags=tags, + exclude_tags=exclude_tags, + test_name_patterns=test_name_patterns, + pdb=pdb, + buffer=buffer, + timing=timing, + shuffle=shuffle, + ) + failures = test_runner.run_tests(test_labels) + teardown_run_tests(state) + return failures + + +def collect_test_modules(start_at, start_after): + test_modules, state = setup_collect_tests(start_at, start_after) + teardown_collect_tests(state) + return test_modules + + +def get_subprocess_args(options): + subprocess_args = [sys.executable, __file__, "--settings=%s" % options.settings] + if options.failfast: + subprocess_args.append("--failfast") + if options.verbosity: + subprocess_args.append("--verbosity=%s" % options.verbosity) + if not options.interactive: + subprocess_args.append("--noinput") + if options.tags: + subprocess_args.append("--tag=%s" % options.tags) + if options.exclude_tags: + subprocess_args.append("--exclude_tag=%s" % options.exclude_tags) + if options.shuffle is not False: + if options.shuffle is None: + subprocess_args.append("--shuffle") + else: + subprocess_args.append("--shuffle=%s" % options.shuffle) + return subprocess_args + + +def bisect_tests(bisection_label, options, test_labels, start_at, start_after): + if not test_labels: + test_labels = collect_test_modules(start_at, start_after) + + print("***** Bisecting test suite: %s" % " ".join(test_labels)) + + # Make sure the bisection point isn't in the test list + # Also remove tests that need to be run in specific combinations + for label in [bisection_label, 
"model_inheritance_same_model_name"]: + try: + test_labels.remove(label) + except ValueError: + pass + + subprocess_args = get_subprocess_args(options) + + iteration = 1 + while len(test_labels) > 1: + midpoint = len(test_labels) // 2 + test_labels_a = test_labels[:midpoint] + [bisection_label] + test_labels_b = test_labels[midpoint:] + [bisection_label] + print("***** Pass %da: Running the first half of the test suite" % iteration) + print("***** Test labels: %s" % " ".join(test_labels_a)) + failures_a = subprocess.run(subprocess_args + test_labels_a) + + print("***** Pass %db: Running the second half of the test suite" % iteration) + print("***** Test labels: %s" % " ".join(test_labels_b)) + print("") + failures_b = subprocess.run(subprocess_args + test_labels_b) + + if failures_a.returncode and not failures_b.returncode: + print("***** Problem found in first half. Bisecting again...") + iteration += 1 + test_labels = test_labels_a[:-1] + elif failures_b.returncode and not failures_a.returncode: + print("***** Problem found in second half. Bisecting again...") + iteration += 1 + test_labels = test_labels_b[:-1] + elif failures_a.returncode and failures_b.returncode: + print("***** Multiple sources of failure found") + break + else: + print("***** No source of failure found... 
try pair execution (--pair)") + break + + if len(test_labels) == 1: + print("***** Source of error: %s" % test_labels[0]) + + +def paired_tests(paired_test, options, test_labels, start_at, start_after): + if not test_labels: + test_labels = collect_test_modules(start_at, start_after) + + print("***** Trying paired execution") + + # Make sure the constant member of the pair isn't in the test list + # Also remove tests that need to be run in specific combinations + for label in [paired_test, "model_inheritance_same_model_name"]: + try: + test_labels.remove(label) + except ValueError: + pass + + subprocess_args = get_subprocess_args(options) + + for i, label in enumerate(test_labels): + print( + "***** %d of %d: Check test pairing with %s" + % (i + 1, len(test_labels), label) + ) + failures = subprocess.call(subprocess_args + [label, paired_test]) + if failures: + print("***** Found problem pair with %s" % label) + return + + print("***** No problem pair found") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Run the Django test suite.") + parser.add_argument( + "modules", + nargs="*", + metavar="module", + help='Optional path(s) to test modules; e.g. "i18n" or ' + '"i18n.tests.TranslationTests.test_lazy_objects".', + ) + parser.add_argument( + "-v", + "--verbosity", + default=1, + type=int, + choices=[0, 1, 2, 3], + help="Verbosity level; 0=minimal output, 1=normal output, 2=all output", + ) + parser.add_argument( + "--noinput", + action="store_false", + dest="interactive", + help="Tells Django to NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--failfast", + action="store_true", + help="Tells Django to stop running the test suite after first failed test.", + ) + parser.add_argument( + "--keepdb", + action="store_true", + help="Tells Django to preserve the test database between runs.", + ) + parser.add_argument( + "--settings", + help='Python path to settings module, e.g. "myproject.settings". 
If ' + "this isn't provided, either the DJANGO_SETTINGS_MODULE " + 'environment variable or "test_sqlite" will be used.', + ) + parser.add_argument( + "--bisect", + help="Bisect the test suite to discover a test that causes a test " + "failure when combined with the named test.", + ) + parser.add_argument( + "--pair", + help="Run the test suite in pairs with the named test to find problem pairs.", + ) + parser.add_argument( + "--shuffle", + nargs="?", + default=False, + type=int, + metavar="SEED", + help=( + "Shuffle the order of test cases to help check that tests are " + "properly isolated." + ), + ) + parser.add_argument( + "--reverse", + action="store_true", + help="Sort test suites and test cases in opposite order to debug " + "test side effects not apparent with normal execution lineup.", + ) + parser.add_argument( + "--selenium", + action=ActionSelenium, + metavar="BROWSERS", + help="A comma-separated list of browsers to run the Selenium tests against.", + ) + parser.add_argument( + "--headless", + action="store_true", + help="Run selenium tests in headless mode, if the browser supports the option.", + ) + parser.add_argument( + "--selenium-hub", + help="A URL for a selenium hub instance to use in combination with --selenium.", + ) + parser.add_argument( + "--external-host", + default=socket.gethostname(), + help=( + "The external host that can be reached by the selenium hub instance when " + "running Selenium tests via Selenium Hub." + ), + ) + parser.add_argument( + "--debug-sql", + action="store_true", + help="Turn on the SQL query logger within tests.", + ) + # 0 is converted to "auto" or 1 later on, depending on a method used by + # multiprocessing to start subprocesses and on the backend support for + # cloning databases. + parser.add_argument( + "--parallel", + nargs="?", + const="auto", + default=0, + type=parallel_type, + metavar="N", + help=( + 'Run tests using up to N parallel processes. 
Use the value "auto" ' + "to run one test process for each processor core." + ), + ) + parser.add_argument( + "--tag", + dest="tags", + action="append", + help="Run only tests with the specified tags. Can be used multiple times.", + ) + parser.add_argument( + "--exclude-tag", + dest="exclude_tags", + action="append", + help="Do not run tests with the specified tag. Can be used multiple times.", + ) + parser.add_argument( + "--start-after", + dest="start_after", + help="Run tests starting after the specified top-level module.", + ) + parser.add_argument( + "--start-at", + dest="start_at", + help="Run tests starting at the specified top-level module.", + ) + parser.add_argument( + "--pdb", action="store_true", help="Runs the PDB debugger on error or failure." + ) + parser.add_argument( + "-b", + "--buffer", + action="store_true", + help="Discard output of passing tests.", + ) + parser.add_argument( + "--timing", + action="store_true", + help="Output timings, including database set up and total run time.", + ) + parser.add_argument( + "-k", + dest="test_name_patterns", + action="append", + help=( + "Only run test methods and classes matching test name pattern. " + "Same as unittest -k option. Can be used multiple times." + ), + ) + + options = parser.parse_args() + + using_selenium_hub = options.selenium and options.selenium_hub + if options.selenium_hub and not options.selenium: + parser.error( + "--selenium-hub and --external-host require --selenium to be used." 
+ ) + if using_selenium_hub and not options.external_host: + parser.error("--selenium-hub and --external-host must be used together.") + + # Allow including a trailing slash on app_labels for tab completion convenience + options.modules = [os.path.normpath(labels) for labels in options.modules] + + mutually_exclusive_options = [ + options.start_at, + options.start_after, + options.modules, + ] + enabled_module_options = [ + bool(option) for option in mutually_exclusive_options + ].count(True) + if enabled_module_options > 1: + print( + "Aborting: --start-at, --start-after, and test labels are mutually " + "exclusive." + ) + sys.exit(1) + for opt_name in ["start_at", "start_after"]: + opt_val = getattr(options, opt_name) + if opt_val: + if "." in opt_val: + print( + "Aborting: --%s must be a top-level module." + % opt_name.replace("_", "-") + ) + sys.exit(1) + setattr(options, opt_name, os.path.normpath(opt_val)) + if options.settings: + os.environ["DJANGO_SETTINGS_MODULE"] = options.settings + else: + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite") + options.settings = os.environ["DJANGO_SETTINGS_MODULE"] + + if options.selenium: + if multiprocessing.get_start_method() == "spawn" and options.parallel != 1: + parser.error( + "You cannot use --selenium with parallel tests on this system. " + "Pass --parallel=1 to use --selenium." 
+ ) + if not options.tags: + options.tags = ["selenium"] + elif "selenium" not in options.tags: + options.tags.append("selenium") + if options.selenium_hub: + SeleniumTestCaseBase.selenium_hub = options.selenium_hub + SeleniumTestCaseBase.external_host = options.external_host + SeleniumTestCaseBase.headless = options.headless + SeleniumTestCaseBase.browsers = options.selenium + + if options.bisect: + bisect_tests( + options.bisect, + options, + options.modules, + options.start_at, + options.start_after, + ) + elif options.pair: + paired_tests( + options.pair, + options, + options.modules, + options.start_at, + options.start_after, + ) + else: + time_keeper = TimeKeeper() if options.timing else NullTimeKeeper() + with time_keeper.timed("Total run"): + failures = django_tests( + options.verbosity, + options.interactive, + options.failfast, + options.keepdb, + options.reverse, + options.modules, + options.debug_sql, + options.parallel, + options.tags, + options.exclude_tags, + getattr(options, "test_name_patterns", None), + options.start_at, + options.start_after, + options.pdb, + options.buffer, + options.timing, + options.shuffle, + ) + time_keeper.print_results() + if failures: + sys.exit(1) diff --git a/testbed/django__django/tests/save_delete_hooks/__init__.py b/testbed/django__django/tests/save_delete_hooks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/save_delete_hooks/models.py b/testbed/django__django/tests/save_delete_hooks/models.py new file mode 100644 index 0000000000000000000000000000000000000000..e7c598aeaeddc23ee3d86c4eb6364adb10b36f7f --- /dev/null +++ b/testbed/django__django/tests/save_delete_hooks/models.py @@ -0,0 +1,31 @@ +""" +Adding hooks before/after saving and deleting + +To execute arbitrary code around ``save()`` and ``delete()``, just subclass +the methods. 
+""" +from django.db import models + + +class Person(models.Model): + first_name = models.CharField(max_length=20) + last_name = models.CharField(max_length=20) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.data = [] + + def __str__(self): + return "%s %s" % (self.first_name, self.last_name) + + def save(self, *args, **kwargs): + self.data.append("Before save") + # Call the "real" save() method + super().save(*args, **kwargs) + self.data.append("After save") + + def delete(self): + self.data.append("Before deletion") + # Call the "real" delete() method + super().delete() + self.data.append("After deletion") diff --git a/testbed/django__django/tests/save_delete_hooks/tests.py b/testbed/django__django/tests/save_delete_hooks/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..4bd97b54c8d3328bfd1197188979cc9b1b4d24b4 --- /dev/null +++ b/testbed/django__django/tests/save_delete_hooks/tests.py @@ -0,0 +1,37 @@ +from django.test import TestCase + +from .models import Person + + +class SaveDeleteHookTests(TestCase): + def test_basic(self): + p = Person(first_name="John", last_name="Smith") + self.assertEqual(p.data, []) + p.save() + self.assertEqual( + p.data, + [ + "Before save", + "After save", + ], + ) + + self.assertQuerySetEqual( + Person.objects.all(), + [ + "John Smith", + ], + str, + ) + + p.delete() + self.assertEqual( + p.data, + [ + "Before save", + "After save", + "Before deletion", + "After deletion", + ], + ) + self.assertQuerySetEqual(Person.objects.all(), []) diff --git a/testbed/django__django/tests/schema/__init__.py b/testbed/django__django/tests/schema/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/schema/fields.py b/testbed/django__django/tests/schema/fields.py new file mode 100644 index 0000000000000000000000000000000000000000..998cb28b8cfb9b8655f4194efc5f6cfb1ff7e482 --- 
/dev/null +++ b/testbed/django__django/tests/schema/fields.py @@ -0,0 +1,104 @@ +from functools import partial + +from django.db import models +from django.db.models.fields.related import ( + RECURSIVE_RELATIONSHIP_CONSTANT, + ManyToManyDescriptor, + RelatedField, + create_many_to_many_intermediary_model, +) + + +class CustomManyToManyField(RelatedField): + """ + Ticket #24104 - Need to have a custom ManyToManyField, + which is not an inheritor of ManyToManyField. + """ + + many_to_many = True + + def __init__( + self, + to, + db_constraint=True, + swappable=True, + related_name=None, + related_query_name=None, + limit_choices_to=None, + symmetrical=None, + through=None, + through_fields=None, + db_table=None, + **kwargs, + ): + try: + to._meta + except AttributeError: + to = str(to) + kwargs["rel"] = models.ManyToManyRel( + self, + to, + related_name=related_name, + related_query_name=related_query_name, + limit_choices_to=limit_choices_to, + symmetrical=symmetrical + if symmetrical is not None + else (to == RECURSIVE_RELATIONSHIP_CONSTANT), + through=through, + through_fields=through_fields, + db_constraint=db_constraint, + ) + self.swappable = swappable + self.db_table = db_table + if kwargs["rel"].through is not None and self.db_table is not None: + raise ValueError( + "Cannot specify a db_table if an intermediary model is used." 
+ ) + super().__init__( + related_name=related_name, + related_query_name=related_query_name, + limit_choices_to=limit_choices_to, + **kwargs, + ) + + def contribute_to_class(self, cls, name, **kwargs): + if self.remote_field.symmetrical and ( + self.remote_field.model == "self" + or self.remote_field.model == cls._meta.object_name + ): + self.remote_field.related_name = "%s_rel_+" % name + super().contribute_to_class(cls, name, **kwargs) + if ( + not self.remote_field.through + and not cls._meta.abstract + and not cls._meta.swapped + ): + self.remote_field.through = create_many_to_many_intermediary_model( + self, cls + ) + setattr(cls, self.name, ManyToManyDescriptor(self.remote_field)) + self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta) + + def get_internal_type(self): + return "ManyToManyField" + + # Copy those methods from ManyToManyField because they don't call super() internally + contribute_to_related_class = models.ManyToManyField.__dict__[ + "contribute_to_related_class" + ] + _get_m2m_attr = models.ManyToManyField.__dict__["_get_m2m_attr"] + _get_m2m_reverse_attr = models.ManyToManyField.__dict__["_get_m2m_reverse_attr"] + _get_m2m_db_table = models.ManyToManyField.__dict__["_get_m2m_db_table"] + + +class InheritedManyToManyField(models.ManyToManyField): + pass + + +class MediumBlobField(models.BinaryField): + """ + A MySQL BinaryField that uses a different blob size. 
+ """ + + def db_type(self, connection): + return "MEDIUMBLOB" diff --git a/testbed/django__django/tests/schema/models.py b/testbed/django__django/tests/schema/models.py new file mode 100644 index 0000000000000000000000000000000000000000..75e32a0eabed3542e2a46de2393aa6db86133092 --- /dev/null +++ b/testbed/django__django/tests/schema/models.py @@ -0,0 +1,217 @@ +from django.apps.registry import Apps +from django.db import models + +# Because we want to test creation and deletion of these as separate things, +# these models are all inserted into a separate Apps so the main test +# runner doesn't migrate them. + +new_apps = Apps() + + +class Author(models.Model): + name = models.CharField(max_length=255) + height = models.PositiveIntegerField(null=True, blank=True) + weight = models.IntegerField(null=True, blank=True) + uuid = models.UUIDField(null=True) + + class Meta: + apps = new_apps + + +class AuthorCharFieldWithIndex(models.Model): + char_field = models.CharField(max_length=31, db_index=True) + + class Meta: + apps = new_apps + + +class AuthorTextFieldWithIndex(models.Model): + text_field = models.TextField(db_index=True) + + class Meta: + apps = new_apps + + +class AuthorWithDefaultHeight(models.Model): + name = models.CharField(max_length=255) + height = models.PositiveIntegerField(null=True, blank=True, default=42) + + class Meta: + apps = new_apps + + +class AuthorWithEvenLongerName(models.Model): + name = models.CharField(max_length=255) + height = models.PositiveIntegerField(null=True, blank=True) + + class Meta: + apps = new_apps + + +class AuthorWithIndexedName(models.Model): + name = models.CharField(max_length=255, db_index=True) + + class Meta: + apps = new_apps + + +class AuthorWithUniqueName(models.Model): + name = models.CharField(max_length=255, unique=True) + + class Meta: + apps = new_apps + + +class AuthorWithUniqueNameAndBirthday(models.Model): + name = models.CharField(max_length=255) + birthday = models.DateField() + + class Meta: + apps = 
new_apps + unique_together = [["name", "birthday"]] + + +class Book(models.Model): + author = models.ForeignKey(Author, models.CASCADE) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + # tags = models.ManyToManyField("Tag", related_name="books") + + class Meta: + apps = new_apps + + +class BookWeak(models.Model): + author = models.ForeignKey(Author, models.CASCADE, db_constraint=False) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + + class Meta: + apps = new_apps + + +class BookWithLongName(models.Model): + author_foreign_key_with_really_long_field_name = models.ForeignKey( + AuthorWithEvenLongerName, + models.CASCADE, + ) + + class Meta: + apps = new_apps + + +class BookWithO2O(models.Model): + author = models.OneToOneField(Author, models.CASCADE) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + + class Meta: + apps = new_apps + db_table = "schema_book" + + +class BookWithSlug(models.Model): + author = models.ForeignKey(Author, models.CASCADE) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + slug = models.CharField(max_length=20, unique=True) + + class Meta: + apps = new_apps + db_table = "schema_book" + + +class BookWithoutAuthor(models.Model): + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + + class Meta: + apps = new_apps + db_table = "schema_book" + + +class BookForeignObj(models.Model): + title = models.CharField(max_length=100, db_index=True) + author_id = models.IntegerField() + + class Meta: + apps = new_apps + + +class IntegerPK(models.Model): + i = models.IntegerField(primary_key=True) + j = models.IntegerField(unique=True) + + class Meta: + apps = new_apps + db_table = "INTEGERPK" # uppercase to ensure proper quoting + + +class Note(models.Model): + info = models.TextField() + address = models.TextField(null=True) + + 
# Based on tests/reserved_names/models.py
class Thing(models.Model):
    when = models.CharField(max_length=1, primary_key=True)

    class Meta:
        apps = new_apps
        # "drop" is an SQL reserved word; forces identifier quoting in DDL.
        db_table = "drop"

    def __str__(self):
        return self.when
"SELECT * FROM foo WHERE id in (42, 1337)" + params = None + self.assertEqual(cm.records[0].sql, sql) + self.assertEqual(cm.records[0].params, params) + self.assertEqual(cm.records[0].getMessage(), f"{sql}; (params {params})") diff --git a/testbed/django__django/tests/schema/tests.py b/testbed/django__django/tests/schema/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..688a9f1fcf1e9e843ba3a588b85bc5f0061373d1 --- /dev/null +++ b/testbed/django__django/tests/schema/tests.py @@ -0,0 +1,5401 @@ +import datetime +import itertools +import unittest +from copy import copy +from unittest import mock + +from django.core.exceptions import FieldError +from django.core.management.color import no_style +from django.db import ( + DatabaseError, + DataError, + IntegrityError, + OperationalError, + connection, +) +from django.db.backends.utils import truncate_name +from django.db.models import ( + CASCADE, + PROTECT, + AutoField, + BigAutoField, + BigIntegerField, + BinaryField, + BooleanField, + CharField, + CheckConstraint, + DateField, + DateTimeField, + DecimalField, + DurationField, + F, + FloatField, + ForeignKey, + ForeignObject, + Index, + IntegerField, + JSONField, + ManyToManyField, + Model, + OneToOneField, + OrderBy, + PositiveIntegerField, + Q, + SlugField, + SmallAutoField, + SmallIntegerField, + TextField, + TimeField, + UniqueConstraint, + UUIDField, + Value, +) +from django.db.models.fields.json import KeyTextTransform +from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper +from django.db.models.indexes import IndexExpression +from django.db.transaction import TransactionManagementError, atomic +from django.test import ( + TransactionTestCase, + ignore_warnings, + skipIfDBFeature, + skipUnlessDBFeature, +) +from django.test.utils import CaptureQueriesContext, isolate_apps, register_lookup +from django.utils.deprecation import RemovedInDjango51Warning + +from .fields import CustomManyToManyField, 
InheritedManyToManyField, MediumBlobField +from .models import ( + Author, + AuthorCharFieldWithIndex, + AuthorTextFieldWithIndex, + AuthorWithDefaultHeight, + AuthorWithEvenLongerName, + AuthorWithIndexedName, + AuthorWithUniqueName, + AuthorWithUniqueNameAndBirthday, + Book, + BookForeignObj, + BookWeak, + BookWithLongName, + BookWithO2O, + BookWithoutAuthor, + BookWithSlug, + IntegerPK, + Node, + Note, + NoteRename, + Tag, + TagM2MTest, + TagUniqueRename, + Thing, + UniqueTest, + new_apps, +) + + +class SchemaTests(TransactionTestCase): + """ + Tests for the schema-alteration code. + + Be aware that these tests are more liable than most to false results, + as sometimes the code to check if a test has worked is almost as complex + as the code it is testing. + """ + + available_apps = [] + + models = [ + Author, + AuthorCharFieldWithIndex, + AuthorTextFieldWithIndex, + AuthorWithDefaultHeight, + AuthorWithEvenLongerName, + Book, + BookWeak, + BookWithLongName, + BookWithO2O, + BookWithSlug, + IntegerPK, + Node, + Note, + Tag, + TagM2MTest, + TagUniqueRename, + Thing, + UniqueTest, + ] + + # Utility functions + + def setUp(self): + # local_models should contain test dependent model classes that will be + # automatically removed from the app cache on test tear down. + self.local_models = [] + # isolated_local_models contains models that are in test methods + # decorated with @isolate_apps. 
def column_classes(self, model):
    """
    Return {column name: (introspected field type, raw description row)}
    for the model's database table, read through a fresh cursor.
    """
    introspection = connection.introspection
    with connection.cursor() as cursor:
        description = introspection.get_table_description(
            cursor,
            model._meta.db_table,
        )
    columns = {}
    for row in description:
        field_type = introspection.get_field_type(row[1], row)
        # SQLite has a different format for field_type: a tuple whose first
        # item is the type name — normalize to the bare name.
        if isinstance(field_type, tuple):
            field_type = field_type[0]
        columns[row[0]] = (field_type, row)
    return columns
def get_constraints_for_column(self, model, column_name):
    """
    Return the sorted names of every constraint that covers exactly the
    single column `column_name` on the model's table.
    """
    table_constraints = self.get_constraints(model._meta.db_table)
    return sorted(
        name
        for name, details in table_constraints.items()
        if details["columns"] == [column_name]
    )
+ """ + with connection.cursor() as cursor: + constraints = connection.introspection.get_constraints(cursor, table) + counts = {"fks": 0, "uniques": 0, "indexes": 0} + for c in constraints.values(): + if c["columns"] == [column]: + if c["foreign_key"] == fk_to: + counts["fks"] += 1 + if c["unique"]: + counts["uniques"] += 1 + elif c["index"]: + counts["indexes"] += 1 + return counts + + def get_column_collation(self, table, column): + with connection.cursor() as cursor: + return next( + f.collation + for f in connection.introspection.get_table_description(cursor, table) + if f.name == column + ) + + def get_column_comment(self, table, column): + with connection.cursor() as cursor: + return next( + f.comment + for f in connection.introspection.get_table_description(cursor, table) + if f.name == column + ) + + def get_table_comment(self, table): + with connection.cursor() as cursor: + return next( + t.comment + for t in connection.introspection.get_table_list(cursor) + if t.name == table + ) + + def assert_column_comment_not_exists(self, table, column): + with connection.cursor() as cursor: + columns = connection.introspection.get_table_description(cursor, table) + self.assertFalse(any([c.name == column and c.comment for c in columns])) + + def assertIndexOrder(self, table, index, order): + constraints = self.get_constraints(table) + self.assertIn(index, constraints) + index_orders = constraints[index]["orders"] + self.assertTrue( + all(val == expected for val, expected in zip(index_orders, order)) + ) + + def assertForeignKeyExists(self, model, column, expected_fk_table, field="id"): + """ + Fail if the FK constraint on `model.Meta.db_table`.`column` to + `expected_fk_table`.id doesn't exist. 
+ """ + if not connection.features.can_introspect_foreign_keys: + return + constraints = self.get_constraints(model._meta.db_table) + constraint_fk = None + for details in constraints.values(): + if details["columns"] == [column] and details["foreign_key"]: + constraint_fk = details["foreign_key"] + break + self.assertEqual(constraint_fk, (expected_fk_table, field)) + + def assertForeignKeyNotExists(self, model, column, expected_fk_table): + if not connection.features.can_introspect_foreign_keys: + return + with self.assertRaises(AssertionError): + self.assertForeignKeyExists(model, column, expected_fk_table) + + # Tests + def test_creation_deletion(self): + """ + Tries creating a model's table, and then deleting it. + """ + with connection.schema_editor() as editor: + # Create the table + editor.create_model(Author) + # The table is there + list(Author.objects.all()) + # Clean up that table + editor.delete_model(Author) + # No deferred SQL should be left over. + self.assertEqual(editor.deferred_sql, []) + # The table is gone + with self.assertRaises(DatabaseError): + list(Author.objects.all()) + + @skipUnlessDBFeature("supports_foreign_keys") + def test_fk(self): + "Creating tables out of FK order, then repointing, works" + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Book) + editor.create_model(Author) + editor.create_model(Tag) + # Initial tables are there + list(Author.objects.all()) + list(Book.objects.all()) + # Make sure the FK constraint is present + with self.assertRaises(IntegrityError): + Book.objects.create( + author_id=1, + title="Much Ado About Foreign Keys", + pub_date=datetime.datetime.now(), + ) + # Repoint the FK constraint + old_field = Book._meta.get_field("author") + new_field = ForeignKey(Tag, CASCADE) + new_field.set_attributes_from_name("author") + with connection.schema_editor() as editor: + editor.alter_field(Book, old_field, new_field, strict=True) + self.assertForeignKeyExists(Book, "author_id", 
"schema_tag") + + @skipUnlessDBFeature("can_create_inline_fk") + def test_inline_fk(self): + # Create some tables. + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + editor.create_model(Note) + self.assertForeignKeyNotExists(Note, "book_id", "schema_book") + # Add a foreign key from one to the other. + with connection.schema_editor() as editor: + new_field = ForeignKey(Book, CASCADE) + new_field.set_attributes_from_name("book") + editor.add_field(Note, new_field) + self.assertForeignKeyExists(Note, "book_id", "schema_book") + # Creating a FK field with a constraint uses a single statement without + # a deferred ALTER TABLE. + self.assertFalse( + [ + sql + for sql in (str(statement) for statement in editor.deferred_sql) + if sql.startswith("ALTER TABLE") and "ADD CONSTRAINT" in sql + ] + ) + + @skipUnlessDBFeature("can_create_inline_fk") + def test_add_inline_fk_update_data(self): + with connection.schema_editor() as editor: + editor.create_model(Node) + # Add an inline foreign key and update data in the same transaction. + new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True) + new_field.set_attributes_from_name("new_parent_fk") + parent = Node.objects.create() + with connection.schema_editor() as editor: + editor.add_field(Node, new_field) + editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk]) + assertIndex = ( + self.assertIn + if connection.features.indexes_foreign_keys + else self.assertNotIn + ) + assertIndex("new_parent_fk_id", self.get_indexes(Node._meta.db_table)) + + @skipUnlessDBFeature( + "can_create_inline_fk", + "allows_multiple_constraints_on_same_fields", + ) + @isolate_apps("schema") + def test_add_inline_fk_index_update_data(self): + class Node(Model): + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(Node) + # Add an inline foreign key, update data, and an index in the same + # transaction. 
@skipUnlessDBFeature("supports_foreign_keys")
def test_char_field_with_db_index_to_fk(self):
    """
    Altering an indexed CharField into a ForeignKey ends up with a real
    FK constraint pointing at schema_author.
    """
    # Create the table
    with connection.schema_editor() as editor:
        editor.create_model(Author)
        editor.create_model(AuthorCharFieldWithIndex)
    # Change CharField to FK
    old_field = AuthorCharFieldWithIndex._meta.get_field("char_field")
    new_field = ForeignKey(Author, CASCADE, blank=True)
    new_field.set_attributes_from_name("char_field")
    with connection.schema_editor() as editor:
        editor.alter_field(
            AuthorCharFieldWithIndex, old_field, new_field, strict=True
        )
    self.assertForeignKeyExists(
        AuthorCharFieldWithIndex, "char_field_id", "schema_author"
    )
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk_to_proxy(self):
    "Creating a FK to a proxy model creates database constraints."

    class AuthorProxy(Author):
        class Meta:
            app_label = "schema"
            apps = new_apps
            proxy = True

    class AuthorRef(Model):
        # FK targets the proxy; the constraint is asserted below against the
        # concrete model's table (schema_author).
        author = ForeignKey(AuthorProxy, on_delete=CASCADE)

        class Meta:
            app_label = "schema"
            apps = new_apps

    # Registered so tearDown() removes these from the test app registry.
    self.local_models = [AuthorProxy, AuthorRef]

    # Create the table
    with connection.schema_editor() as editor:
        editor.create_model(Author)
        editor.create_model(AuthorRef)
    self.assertForeignKeyExists(AuthorRef, "author_id", "schema_author")
@isolate_apps("schema")
def test_no_db_constraint_added_during_primary_key_change(self):
    """
    When a primary key that's pointed to by a ForeignKey with
    db_constraint=False is altered, a foreign key constraint isn't added.
    """

    class Author(Model):
        class Meta:
            app_label = "schema"

    class BookWeak(Model):
        # db_constraint=False: no database-level FK is ever wanted here.
        author = ForeignKey(Author, CASCADE, db_constraint=False)

        class Meta:
            app_label = "schema"

    with connection.schema_editor() as editor:
        editor.create_model(Author)
        editor.create_model(BookWeak)
    self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
    old_field = Author._meta.get_field("id")
    new_field = BigAutoField(primary_key=True)
    new_field.model = Author
    new_field.set_attributes_from_name("id")
    # @isolate_apps() and inner models are needed to have the model
    # relations populated, otherwise this doesn't act as a regression test.
    self.assertEqual(len(new_field.model._meta.related_objects), 1)
    with connection.schema_editor() as editor:
        editor.alter_field(Author, old_field, new_field, strict=True)
    # Altering the PK must not have sneaked a constraint in.
    self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
def test_add_field_remove_field(self):
    """
    Adding a field and removing it removes all deferred SQL referring to it.
    """
    with connection.schema_editor() as editor:
        # Create a table with a unique constraint on the slug field.
        editor.create_model(Tag)
        # Remove the slug column.
        editor.remove_field(Tag, Tag._meta.get_field("slug"))
    self.assertEqual(editor.deferred_sql, [])
test_add_field_temp_default_boolean(self): + """ + Tests adding fields to models with a temporary default where + the default is False. (#21783) + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure there's no age field + columns = self.column_classes(Author) + self.assertNotIn("age", columns) + # Add some rows of data + Author.objects.create(name="Andrew", height=30) + Author.objects.create(name="Andrea") + # Add a not-null field + new_field = BooleanField(default=False) + new_field.set_attributes_from_name("awesome") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + columns = self.column_classes(Author) + # BooleanField are stored as TINYINT(1) on MySQL. + field_type = columns["awesome"][0] + self.assertEqual( + field_type, connection.features.introspected_field_types["BooleanField"] + ) + + def test_add_field_default_transform(self): + """ + Tests adding fields to models with a default that is not directly + valid in the database (#22581) + """ + + class TestTransformField(IntegerField): + # Weird field that saves the count of items in its value + def get_default(self): + return self.default + + def get_prep_value(self, value): + if value is None: + return 0 + return len(value) + + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Add some rows of data + Author.objects.create(name="Andrew", height=30) + Author.objects.create(name="Andrea") + # Add the field with a default it needs to cast (to string in this case) + new_field = TestTransformField(default={1: 2}) + new_field.set_attributes_from_name("thing") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + # Ensure the field is there + columns = self.column_classes(Author) + field_type, field_info = columns["thing"] + self.assertEqual( + field_type, connection.features.introspected_field_types["IntegerField"] + ) + # Make sure the values 
def test_add_field_o2o_nullable(self):
    """A nullable OneToOneField can be added and its column allows NULL."""
    with connection.schema_editor() as editor:
        editor.create_model(Author)
        editor.create_model(Note)
    new_field = OneToOneField(Note, CASCADE, null=True)
    new_field.set_attributes_from_name("note")
    with connection.schema_editor() as editor:
        editor.add_field(Author, new_field)
    columns = self.column_classes(Author)
    self.assertIn("note_id", columns)
    # NOTE(review): index 6 of the description row is presumably null_ok
    # (same pattern as test_add_field) — confirm against FieldInfo.
    self.assertTrue(columns["note_id"][1][6])
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Add the new field with default + new_field = MediumBlobField(blank=True, default=b"123") + new_field.set_attributes_from_name("bits") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + columns = self.column_classes(Author) + # Introspection treats BLOBs as TextFields + self.assertEqual(columns["bits"][0], "TextField") + + @isolate_apps("schema") + def test_add_auto_field(self): + class AddAutoFieldModel(Model): + name = CharField(max_length=255, primary_key=True) + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(AddAutoFieldModel) + self.isolated_local_models = [AddAutoFieldModel] + old_field = AddAutoFieldModel._meta.get_field("name") + new_field = CharField(max_length=255) + new_field.set_attributes_from_name("name") + new_field.model = AddAutoFieldModel + with connection.schema_editor() as editor: + editor.alter_field(AddAutoFieldModel, old_field, new_field) + new_auto_field = AutoField(primary_key=True) + new_auto_field.set_attributes_from_name("id") + new_auto_field.model = AddAutoFieldModel() + with connection.schema_editor() as editor: + editor.add_field(AddAutoFieldModel, new_auto_field) + # Crashes on PostgreSQL when the GENERATED BY suffix is missing. + AddAutoFieldModel.objects.create(name="test") + + def test_remove_field(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + with CaptureQueriesContext(connection) as ctx: + editor.remove_field(Author, Author._meta.get_field("name")) + columns = self.column_classes(Author) + self.assertNotIn("name", columns) + if getattr(connection.features, "can_alter_table_drop_column", True): + # Table is not rebuilt. 
def test_remove_indexed_field(self):
    """Dropping a db_index'ed CharField removes the column cleanly."""
    with connection.schema_editor() as editor:
        editor.create_model(AuthorCharFieldWithIndex)
    with connection.schema_editor() as editor:
        editor.remove_field(
            AuthorCharFieldWithIndex,
            AuthorCharFieldWithIndex._meta.get_field("char_field"),
        )
    columns = self.column_classes(AuthorCharFieldWithIndex)
    self.assertNotIn("char_field", columns)
def test_alter_auto_field_to_char_field(self):
    """The id AutoField can be altered into a CharField primary key."""
    # Create the table
    with connection.schema_editor() as editor:
        editor.create_model(Author)
    # Change AutoField to CharField
    old_field = Author._meta.get_field("id")
    new_field = CharField(primary_key=True, max_length=50)
    new_field.set_attributes_from_name("id")
    new_field.model = Author
    # strict=True: any leftover schema mismatch raises instead of passing.
    with connection.schema_editor() as editor:
        editor.alter_field(Author, old_field, new_field, strict=True)
        old_field = Author._meta.get_field("uuid")
        new_field = UUIDField(primary_key=True)
        new_field.set_attributes_from_name("uuid")
        new_field.model = Author
        with connection.schema_editor() as editor:
            # Drop the existing pk first so "uuid" can take over.
            editor.remove_field(Author, Author._meta.get_field("id"))
            editor.alter_field(Author, old_field, new_field, strict=True)
        # Redundant unique constraint is not added.
        count = self.get_constraints_count(
            Author._meta.db_table,
            Author._meta.get_field("uuid").column,
            None,
        )
        self.assertLessEqual(count["uniques"], 1)

    @isolate_apps("schema")
    def test_alter_primary_key_quoted_db_table(self):
        # Altering the pk of a model whose db_table is already quoted must
        # not break the generated SQL.
        class Foo(Model):
            class Meta:
                app_label = "schema"
                db_table = '"foo"'

        with connection.schema_editor() as editor:
            editor.create_model(Foo)
        self.isolated_local_models = [Foo]
        old_field = Foo._meta.get_field("id")
        new_field = BigAutoField(primary_key=True)
        new_field.model = Foo
        new_field.set_attributes_from_name("id")
        with connection.schema_editor() as editor:
            editor.alter_field(Foo, old_field, new_field, strict=True)
        # Insert still works after the alteration.
        Foo.objects.create()

    def test_alter_text_field(self):
        # Regression for "BLOB/TEXT column 'info' can't have a default value"
        # on MySQL.
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        old_field = Note._meta.get_field("info")
        new_field = TextField(blank=True)
        new_field.set_attributes_from_name("info")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)

    def test_alter_text_field_to_not_null_with_default_value(self):
        # TextField null -> NOT NULL with a default must succeed.
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        old_field = Note._meta.get_field("address")
        new_field = TextField(blank=True, default="", null=False)
        new_field.set_attributes_from_name("address")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)

    @skipUnlessDBFeature("can_defer_constraint_checks", "can_rollback_ddl")
    def test_alter_fk_checks_deferred_constraints(self):
        """
        #25492 - Altering a foreign key's structure and data in the same
        transaction.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Node)
        old_field = Node._meta.get_field("parent")
        new_field = ForeignKey(Node, CASCADE)
        new_field.set_attributes_from_name("parent")
        parent = Node.objects.create()
        with connection.schema_editor() as editor:
            # Update the parent FK to create a deferred constraint check.
            # Update the data inside the same DDL transaction; the deferred
            # constraint check must not fire during alter_field().
            Node.objects.update(parent=parent)
            editor.alter_field(Node, old_field, new_field, strict=True)

    @isolate_apps("schema")
    def test_alter_null_with_default_value_deferred_constraints(self):
        class Publisher(Model):
            class Meta:
                app_label = "schema"

        class Article(Model):
            publisher = ForeignKey(Publisher, CASCADE)
            title = CharField(max_length=50, null=True)
            description = CharField(max_length=100, null=True)

            class Meta:
                app_label = "schema"

        with connection.schema_editor() as editor:
            editor.create_model(Publisher)
            editor.create_model(Article)
        self.isolated_local_models = [Article, Publisher]

        publisher = Publisher.objects.create()
        Article.objects.create(publisher=publisher)

        # Two null -> NOT NULL (with default) alterations in one transaction.
        old_title = Article._meta.get_field("title")
        new_title = CharField(max_length=50, null=False, default="")
        new_title.set_attributes_from_name("title")
        old_description = Article._meta.get_field("description")
        new_description = CharField(max_length=100, null=False, default="")
        new_description.set_attributes_from_name("description")
        with connection.schema_editor() as editor:
            editor.alter_field(Article, old_title, new_title, strict=True)
            editor.alter_field(Article, old_description, new_description, strict=True)

    def test_alter_text_field_to_date_field(self):
        """
        #25002 - Test conversion of text field to date field.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        Note.objects.create(info="1988-05-05")
        old_field = Note._meta.get_field("info")
        new_field = DateField(blank=True)
        new_field.set_attributes_from_name("info")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        # Make sure the field isn't nullable
        columns = self.column_classes(Note)
        self.assertFalse(columns["info"][1][6])

    def test_alter_text_field_to_datetime_field(self):
        """
        #25002 - Test conversion of text field to datetime field.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        Note.objects.create(info="1988-05-05 3:16:17.4567")
        old_field = Note._meta.get_field("info")
        new_field = DateTimeField(blank=True)
        new_field.set_attributes_from_name("info")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        # Make sure the field isn't nullable
        columns = self.column_classes(Note)
        self.assertFalse(columns["info"][1][6])

    def test_alter_text_field_to_time_field(self):
        """
        #25002 - Test conversion of text field to time field.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        Note.objects.create(info="3:16:17.4567")
        old_field = Note._meta.get_field("info")
        new_field = TimeField(blank=True)
        new_field.set_attributes_from_name("info")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        # Make sure the field isn't nullable
        columns = self.column_classes(Note)
        self.assertFalse(columns["info"][1][6])

    @skipIfDBFeature("interprets_empty_strings_as_nulls")
    def test_alter_textual_field_keep_null_status(self):
        """
        Changing a field type shouldn't affect the not null status.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        with self.assertRaises(IntegrityError):
            Note.objects.create(info=None)
        old_field = Note._meta.get_field("info")
        new_field = CharField(max_length=50)
        new_field.set_attributes_from_name("info")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        # Still NOT NULL after the type change.
        with self.assertRaises(IntegrityError):
            Note.objects.create(info=None)

    @skipUnlessDBFeature("interprets_empty_strings_as_nulls")
    def test_alter_textual_field_not_null_to_null(self):
        """
        Nullability for textual fields is preserved on databases that
        interpret empty strings as NULLs.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        columns = self.column_classes(Author)
        # Field is nullable.
        self.assertTrue(columns["uuid"][1][6])
        # Change to NOT NULL.
        old_field = Author._meta.get_field("uuid")
        new_field = SlugField(null=False, blank=True)
        new_field.set_attributes_from_name("uuid")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        columns = self.column_classes(Author)
        # Nullability is preserved.
        self.assertTrue(columns["uuid"][1][6])

    def test_alter_numeric_field_keep_null_status(self):
        """
        Changing a field type shouldn't affect the not null status.
        """
        with connection.schema_editor() as editor:
            editor.create_model(UniqueTest)
        with self.assertRaises(IntegrityError):
            UniqueTest.objects.create(year=None, slug="aaa")
        old_field = UniqueTest._meta.get_field("year")
        new_field = BigIntegerField()
        new_field.set_attributes_from_name("year")
        with connection.schema_editor() as editor:
            editor.alter_field(UniqueTest, old_field, new_field, strict=True)
        # Still NOT NULL after the type change.
        with self.assertRaises(IntegrityError):
            UniqueTest.objects.create(year=None, slug="bbb")

    def test_alter_null_to_not_null(self):
        """
        #23609 - Tests handling of default values when altering from NULL to NOT NULL.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the field is right to begin with
        columns = self.column_classes(Author)
        self.assertTrue(columns["height"][1][6])
        # Create some test data
        Author.objects.create(name="Not null author", height=12)
        Author.objects.create(name="Null author")
        # Verify null value
        self.assertEqual(Author.objects.get(name="Not null author").height, 12)
        self.assertIsNone(Author.objects.get(name="Null author").height)
        # Alter the height field to NOT NULL with default
        old_field = Author._meta.get_field("height")
        new_field = PositiveIntegerField(default=42)
        new_field.set_attributes_from_name("height")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        columns = self.column_classes(Author)
        self.assertFalse(columns["height"][1][6])
        # Verify default value
        self.assertEqual(Author.objects.get(name="Not null author").height, 12)
        self.assertEqual(Author.objects.get(name="Null author").height, 42)

    def test_alter_charfield_to_null(self):
        """
        #24307 - Should skip an alter statement on databases with
        interprets_empty_strings_as_nulls when changing a CharField to null.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Change the CharField to null
        old_field = Author._meta.get_field("name")
        new_field = copy(old_field)
        new_field.null = True
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)

    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_char_field_decrease_length(self):
        # Create the table.
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        Author.objects.create(name="x" * 255)
        # Change max_length of CharField.
        old_field = Author._meta.get_field("name")
        new_field = CharField(max_length=254)
        new_field.set_attributes_from_name("name")
        with connection.schema_editor() as editor:
            # Existing 255-char rows no longer fit, so the backend rejects
            # the narrowing ALTER.
            msg = "value too long for type character varying(254)"
            with self.assertRaisesMessage(DataError, msg):
                editor.alter_field(Author, old_field, new_field, strict=True)

    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_field_with_custom_db_type(self):
        from django.contrib.postgres.fields import ArrayField

        class Foo(Model):
            field = ArrayField(CharField(max_length=255))

            class Meta:
                app_label = "schema"

        with connection.schema_editor() as editor:
            editor.create_model(Foo)
        self.isolated_local_models = [Foo]
        old_field = Foo._meta.get_field("field")
        new_field = ArrayField(CharField(max_length=16))
        new_field.set_attributes_from_name("field")
        new_field.model = Foo
        with connection.schema_editor() as editor:
            editor.alter_field(Foo, old_field, new_field, strict=True)

    @isolate_apps("schema")
    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_array_field_decrease_base_field_length(self):
        from django.contrib.postgres.fields import ArrayField

        class ArrayModel(Model):
            field = ArrayField(CharField(max_length=16))

            class Meta:
                app_label = "schema"

        with connection.schema_editor() as editor:
            editor.create_model(ArrayModel)
        self.isolated_local_models = [ArrayModel]
        ArrayModel.objects.create(field=["x" * 16])
        old_field = ArrayModel._meta.get_field("field")
        new_field = ArrayField(CharField(max_length=15))
        new_field.set_attributes_from_name("field")
        new_field.model = ArrayModel
        with connection.schema_editor() as editor:
            # The existing 16-char element makes the narrowing ALTER fail.
            msg = "value too long for type character varying(15)"
            with self.assertRaisesMessage(DataError, msg):
                editor.alter_field(ArrayModel, old_field, new_field, strict=True)

    @isolate_apps("schema")
    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_array_field_decrease_nested_base_field_length(self):
        from django.contrib.postgres.fields import ArrayField

        class ArrayModel(Model):
            field = ArrayField(ArrayField(CharField(max_length=16)))

            class Meta:
                app_label = "schema"

        with connection.schema_editor() as editor:
            editor.create_model(ArrayModel)
        self.isolated_local_models = [ArrayModel]
        ArrayModel.objects.create(field=[["x" * 16]])
        old_field = ArrayModel._meta.get_field("field")
        new_field = ArrayField(ArrayField(CharField(max_length=15)))
        new_field.set_attributes_from_name("field")
        new_field.model = ArrayModel
        with connection.schema_editor() as editor:
            # Same as above, but for a nested array's base field.
            msg = "value too long for type character varying(15)"
            with self.assertRaisesMessage(DataError, msg):
                editor.alter_field(ArrayModel, old_field, new_field, strict=True)

    def _add_ci_collation(self):
        # Create a case-insensitive ICU collation and register a cleanup that
        # drops it; returns the collation name.
        ci_collation = "case_insensitive"

        def drop_collation():
            with connection.cursor() as cursor:
                cursor.execute(f"DROP COLLATION IF EXISTS {ci_collation}")

        with connection.cursor() as cursor:
            cursor.execute(
                f"CREATE COLLATION IF NOT EXISTS {ci_collation} (provider=icu, "
                f"locale='und-u-ks-level2', deterministic=false)"
            )
        self.addCleanup(drop_collation)
        return ci_collation

    @isolate_apps("schema")
    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    @skipUnlessDBFeature(
        "supports_collation_on_charfield",
        "supports_non_deterministic_collations",
    )
    def test_db_collation_arrayfield(self):
        from django.contrib.postgres.fields import ArrayField

        ci_collation = self._add_ci_collation()
        cs_collation = "en-x-icu"

        class ArrayModel(Model):
            field = ArrayField(CharField(max_length=16, db_collation=ci_collation))

            class Meta:
                app_label = "schema"

        # Create the table.
        with connection.schema_editor() as editor:
            editor.create_model(ArrayModel)
        self.isolated_local_models = [ArrayModel]
        self.assertEqual(
            self.get_column_collation(ArrayModel._meta.db_table, "field"),
            ci_collation,
        )
        # Alter collation.
        old_field = ArrayModel._meta.get_field("field")
        new_field_cs = ArrayField(CharField(max_length=16, db_collation=cs_collation))
        new_field_cs.set_attributes_from_name("field")
        # NOTE(review): this assigns the ArrayField *class*, not the
        # ArrayModel model — looks like a typo for ArrayModel; confirm
        # against upstream before changing (the test passes as-is).
        new_field_cs.model = ArrayField
        with connection.schema_editor() as editor:
            editor.alter_field(ArrayModel, old_field, new_field_cs, strict=True)
        self.assertEqual(
            self.get_column_collation(ArrayModel._meta.db_table, "field"),
            cs_collation,
        )

    @isolate_apps("schema")
    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    @skipUnlessDBFeature(
        "supports_collation_on_charfield",
        "supports_non_deterministic_collations",
    )
    def test_unique_with_collation_charfield(self):
        ci_collation = self._add_ci_collation()

        class CiCharModel(Model):
            field = CharField(max_length=16, db_collation=ci_collation, unique=True)

            class Meta:
                app_label = "schema"

        # Create the table.
        with connection.schema_editor() as editor:
            editor.create_model(CiCharModel)
        self.isolated_local_models = [CiCharModel]
        # Both the collation and the unique constraint are applied.
        self.assertEqual(
            self.get_column_collation(CiCharModel._meta.db_table, "field"),
            ci_collation,
        )
        self.assertIn("field", self.get_uniques(CiCharModel._meta.db_table))

    @isolate_apps("schema")
    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    @skipUnlessDBFeature(
        "supports_collation_on_charfield",
        "supports_non_deterministic_collations",
    )
    def test_relation_to_collation_charfield(self):
        ci_collation = self._add_ci_collation()

        class CiCharModel(Model):
            field = CharField(max_length=16, db_collation=ci_collation, unique=True)

            class Meta:
                app_label = "schema"

        class RelationModel(Model):
            field = OneToOneField(CiCharModel, CASCADE, to_field="field")

            class Meta:
                app_label = "schema"

        # Create the table.
        with connection.schema_editor() as editor:
            editor.create_model(CiCharModel)
            editor.create_model(RelationModel)
        self.isolated_local_models = [CiCharModel, RelationModel]
        # The FK column inherits the collation of the referenced column.
        self.assertEqual(
            self.get_column_collation(RelationModel._meta.db_table, "field_id"),
            ci_collation,
        )
        self.assertEqual(
            self.get_column_collation(CiCharModel._meta.db_table, "field"),
            ci_collation,
        )
        self.assertIn("field_id", self.get_uniques(RelationModel._meta.db_table))

    def test_alter_textfield_to_null(self):
        """
        #24307 - Should skip an alter statement on databases with
        interprets_empty_strings_as_nulls when changing a TextField to null.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        # Change the TextField to null
        old_field = Note._meta.get_field("info")
        new_field = copy(old_field)
        new_field.null = True
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)

    def test_alter_null_to_not_null_keeping_default(self):
        """
        #23738 - Can change a nullable field with default to non-nullable
        with the same default.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(AuthorWithDefaultHeight)
        # Ensure the field is right to begin with
        columns = self.column_classes(AuthorWithDefaultHeight)
        self.assertTrue(columns["height"][1][6])
        # Alter the height field to NOT NULL keeping the previous default
        old_field = AuthorWithDefaultHeight._meta.get_field("height")
        new_field = PositiveIntegerField(default=42)
        new_field.set_attributes_from_name("height")
        with connection.schema_editor() as editor:
            editor.alter_field(
                AuthorWithDefaultHeight, old_field, new_field, strict=True
            )
        columns = self.column_classes(AuthorWithDefaultHeight)
        self.assertFalse(columns["height"][1][6])

    @skipUnlessDBFeature("supports_foreign_keys")
    def test_alter_fk(self):
        """
        Tests altering of FKs
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        # Ensure the field is right to begin with
        columns = self.column_classes(Book)
        self.assertEqual(
            columns["author_id"][0],
            connection.features.introspected_field_types["IntegerField"],
        )
        self.assertForeignKeyExists(Book, "author_id", "schema_author")
        # Alter the FK
        old_field = Book._meta.get_field("author")
        new_field = ForeignKey(Author, CASCADE, editable=False)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(Book, old_field, new_field, strict=True)
        # Column type and FK constraint are unchanged by the no-op-ish alter.
        columns = self.column_classes(Book)
        self.assertEqual(
            columns["author_id"][0],
            connection.features.introspected_field_types["IntegerField"],
        )
        self.assertForeignKeyExists(Book, "author_id", "schema_author")

    @skipUnlessDBFeature("supports_foreign_keys")
    def test_alter_to_fk(self):
        """
        #24447 - Tests adding a FK constraint for an existing column
        """

        class LocalBook(Model):
            author = IntegerField()
            title = CharField(max_length=100, db_index=True)
            pub_date = DateTimeField()

            class Meta:
                app_label = "schema"
                apps = new_apps

        self.local_models = [LocalBook]

        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(LocalBook)
        # Ensure no FK constraint exists
        constraints = self.get_constraints(LocalBook._meta.db_table)
        for details in constraints.values():
            if details["foreign_key"]:
                self.fail(
                    "Found an unexpected FK constraint to %s" % details["columns"]
                )
        old_field = LocalBook._meta.get_field("author")
        new_field = ForeignKey(Author, CASCADE)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(LocalBook, old_field, new_field, strict=True)
        self.assertForeignKeyExists(LocalBook, "author_id", "schema_author")

    @skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
    def test_alter_o2o_to_fk(self):
        """
        #24163 - Tests altering of OneToOneField to ForeignKey
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithO2O)
        # Ensure the field is right to begin with
        columns = self.column_classes(BookWithO2O)
        self.assertEqual(
            columns["author_id"][0],
            connection.features.introspected_field_types["IntegerField"],
        )
        # Ensure the field is unique
        author = Author.objects.create(name="Joe")
        BookWithO2O.objects.create(
            author=author, title="Django 1", pub_date=datetime.datetime.now()
        )
        # A second row with the same author violates the O2O uniqueness.
        with self.assertRaises(IntegrityError):
            BookWithO2O.objects.create(
                author=author, title="Django 2", pub_date=datetime.datetime.now()
            )
        BookWithO2O.objects.all().delete()
        self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")
        # Alter the OneToOneField to ForeignKey
        old_field = BookWithO2O._meta.get_field("author")
        new_field = ForeignKey(Author, CASCADE)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
        columns = self.column_classes(Book)
        self.assertEqual(
            columns["author_id"][0],
            connection.features.introspected_field_types["IntegerField"],
        )
        # Ensure the field is not unique anymore
        Book.objects.create(
            author=author, title="Django 1", pub_date=datetime.datetime.now()
        )
        Book.objects.create(
            author=author, title="Django 2", pub_date=datetime.datetime.now()
        )
        self.assertForeignKeyExists(Book, "author_id", "schema_author")

    @skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
    def test_alter_fk_to_o2o(self):
        """
        #24163 - Tests altering of ForeignKey to OneToOneField
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        # Ensure the field is right to begin with
        columns = self.column_classes(Book)
        self.assertEqual(
            columns["author_id"][0],
            connection.features.introspected_field_types["IntegerField"],
        )
        # Ensure the field is not unique
        author = Author.objects.create(name="Joe")
        Book.objects.create(
            author=author, title="Django 1", pub_date=datetime.datetime.now()
        )
        Book.objects.create(
            author=author, title="Django 2", pub_date=datetime.datetime.now()
        )
        Book.objects.all().delete()
        self.assertForeignKeyExists(Book, "author_id", "schema_author")
        # Alter the ForeignKey to OneToOneField
        old_field = Book._meta.get_field("author")
        new_field = OneToOneField(Author, CASCADE)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(Book, old_field, new_field, strict=True)
        columns = self.column_classes(BookWithO2O)
        self.assertEqual(
            columns["author_id"][0],
            connection.features.introspected_field_types["IntegerField"],
        )
        # Ensure the field is unique now
        BookWithO2O.objects.create(
            author=author, title="Django 1", pub_date=datetime.datetime.now()
        )
        with self.assertRaises(IntegrityError):
            BookWithO2O.objects.create(
                author=author, title="Django 2", pub_date=datetime.datetime.now()
            )
        self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")

    def test_alter_field_fk_to_o2o(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        # Expected constraint counts depend on backend capabilities.
        expected_fks = (
            1
            if connection.features.supports_foreign_keys
            and connection.features.can_introspect_foreign_keys
            else 0
        )
        expected_indexes = 1 if connection.features.indexes_foreign_keys else 0

        # Check the index is right to begin with.
        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(
            counts,
            {"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
        )

        old_field = Book._meta.get_field("author")
        new_field = OneToOneField(Author, CASCADE)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(Book, old_field, new_field)

        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The index on ForeignKey is replaced with a unique constraint for
        # OneToOneField.
        self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})

    def test_autofield_to_o2o(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Note)

        # Rename the field.
        old_field = Author._meta.get_field("id")
        new_field = AutoField(primary_key=True)
        new_field.set_attributes_from_name("note_ptr")
        new_field.model = Author

        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        # Alter AutoField to OneToOneField.
        new_field_o2o = OneToOneField(Note, CASCADE)
        new_field_o2o.set_attributes_from_name("note_ptr")
        new_field_o2o.model = Author

        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, new_field_o2o, strict=True)
        columns = self.column_classes(Author)
        field_type, _ = columns["note_ptr_id"]
        self.assertEqual(
            field_type, connection.features.introspected_field_types["IntegerField"]
        )

    def test_alter_field_fk_keeps_index(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        # Expected constraint counts depend on backend capabilities.
        expected_fks = (
            1
            if connection.features.supports_foreign_keys
            and connection.features.can_introspect_foreign_keys
            else 0
        )
        expected_indexes = 1 if connection.features.indexes_foreign_keys else 0

        # Check the index is right to begin with.
        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(
            counts,
            {"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
        )

        old_field = Book._meta.get_field("author")
        # on_delete changed from CASCADE.
        new_field = ForeignKey(Author, PROTECT)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(Book, old_field, new_field, strict=True)

        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The index remains.
        self.assertEqual(
            counts,
            {"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
        )

    def test_alter_field_o2o_to_fk(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithO2O)
        expected_fks = (
            1
            if connection.features.supports_foreign_keys
            and connection.features.can_introspect_foreign_keys
            else 0
        )

        # Check the unique constraint is right to begin with.
        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})

        old_field = BookWithO2O._meta.get_field("author")
        new_field = ForeignKey(Author, CASCADE)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithO2O, old_field, new_field)

        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The unique constraint on OneToOneField is replaced with an index for
        # ForeignKey.
        self.assertEqual(counts, {"fks": expected_fks, "uniques": 0, "indexes": 1})

    def test_alter_field_o2o_keeps_unique(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithO2O)
        expected_fks = (
            1
            if connection.features.supports_foreign_keys
            and connection.features.can_introspect_foreign_keys
            else 0
        )

        # Check the unique constraint is right to begin with.
        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})

        old_field = BookWithO2O._meta.get_field("author")
        # on_delete changed from CASCADE.
        new_field = OneToOneField(Author, PROTECT)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithO2O, old_field, new_field, strict=True)

        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field("author").column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The unique constraint remains.
        self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})

    @skipUnlessDBFeature("ignores_table_name_case")
    def test_alter_db_table_case(self):
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Alter the case of the table
        old_table_name = Author._meta.db_table
        with connection.schema_editor() as editor:
            editor.alter_db_table(Author, old_table_name, old_table_name.upper())

    def test_alter_implicit_id_to_explicit(self):
        """
        Should be able to convert an implicit "id" field to an explicit "id"
        primary key field.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Author)

        old_field = Author._meta.get_field("id")
        new_field = AutoField(primary_key=True)
        new_field.set_attributes_from_name("id")
        new_field.model = Author
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        # This will fail if DROP DEFAULT is inadvertently executed on this
        # field which drops the id sequence, at least on PostgreSQL.
        Author.objects.create(name="Foo")
        Author.objects.create(name="Bar")

    def test_alter_autofield_pk_to_bigautofield_pk(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        old_field = Author._meta.get_field("id")
        new_field = BigAutoField(primary_key=True)
        new_field.set_attributes_from_name("id")
        new_field.model = Author
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)

        Author.objects.create(name="Foo", pk=1)
        with connection.cursor() as cursor:
            # Reset the sequence after the explicit-pk insert so the next
            # auto-assigned pk doesn't collide.
            sequence_reset_sqls = connection.ops.sequence_reset_sql(
                no_style(), [Author]
            )
            if sequence_reset_sqls:
                cursor.execute(sequence_reset_sqls[0])
        self.assertIsNotNone(Author.objects.create(name="Bar"))

    def test_alter_autofield_pk_to_smallautofield_pk(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        old_field = Author._meta.get_field("id")
        new_field = SmallAutoField(primary_key=True)
        new_field.set_attributes_from_name("id")
        new_field.model = Author
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)

        Author.objects.create(name="Foo", pk=1)
        with connection.cursor() as cursor:
            # Reset the sequence after the explicit-pk insert so the next
            # auto-assigned pk doesn't collide.
            sequence_reset_sqls = connection.ops.sequence_reset_sql(
                no_style(), [Author]
            )
            if sequence_reset_sqls:
                cursor.execute(sequence_reset_sqls[0])
        self.assertIsNotNone(Author.objects.create(name="Bar"))

    def test_alter_int_pk_to_autofield_pk(self):
        """
        Should be able to rename an IntegerField(primary_key=True) to
        AutoField(primary_key=True).
        """
        with connection.schema_editor() as editor:
            editor.create_model(IntegerPK)

        old_field = IntegerPK._meta.get_field("i")
        new_field = AutoField(primary_key=True)
        new_field.model = IntegerPK
        new_field.set_attributes_from_name("i")

        with connection.schema_editor() as editor:
            editor.alter_field(IntegerPK, old_field, new_field, strict=True)

        # A model representing the updated model.
        class IntegerPKToAutoField(Model):
            i = AutoField(primary_key=True)
            j = IntegerField(unique=True)

            class Meta:
                app_label = "schema"
                apps = new_apps
                db_table = IntegerPK._meta.db_table

        # An id (i) is generated by the database.
        obj = IntegerPKToAutoField.objects.create(j=1)
        self.assertIsNotNone(obj.i)

    def test_alter_int_pk_to_bigautofield_pk(self):
        """
        Should be able to rename an IntegerField(primary_key=True) to
        BigAutoField(primary_key=True).
        """
        with connection.schema_editor() as editor:
            editor.create_model(IntegerPK)

        old_field = IntegerPK._meta.get_field("i")
        new_field = BigAutoField(primary_key=True)
        new_field.model = IntegerPK
        new_field.set_attributes_from_name("i")

        with connection.schema_editor() as editor:
            editor.alter_field(IntegerPK, old_field, new_field, strict=True)

        # A model representing the updated model.
        class IntegerPKToBigAutoField(Model):
            i = BigAutoField(primary_key=True)
            j = IntegerField(unique=True)

            class Meta:
                app_label = "schema"
                apps = new_apps
                db_table = IntegerPK._meta.db_table

        # An id (i) is generated by the database.
        obj = IntegerPKToBigAutoField.objects.create(j=1)
        self.assertIsNotNone(obj.i)

    @isolate_apps("schema")
    def test_alter_smallint_pk_to_smallautofield_pk(self):
        """
        Should be able to rename an SmallIntegerField(primary_key=True) to
        SmallAutoField(primary_key=True).
        """

        class SmallIntegerPK(Model):
            i = SmallIntegerField(primary_key=True)

            class Meta:
                app_label = "schema"

        with connection.schema_editor() as editor:
            editor.create_model(SmallIntegerPK)
        self.isolated_local_models = [SmallIntegerPK]
        old_field = SmallIntegerPK._meta.get_field("i")
        new_field = SmallAutoField(primary_key=True)
        new_field.model = SmallIntegerPK
        new_field.set_attributes_from_name("i")
        with connection.schema_editor() as editor:
            editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True)

    @isolate_apps("schema")
    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_serial_auto_field_to_bigautofield(self):
        class SerialAutoField(Model):
            id = SmallAutoField(primary_key=True)

            class Meta:
                app_label = "schema"

        table = SerialAutoField._meta.db_table
        column = SerialAutoField._meta.get_field("id").column
        # The table is created by hand (smallserial) rather than through the
        # schema editor, so it must also be dropped by hand in the finally.
        with connection.cursor() as cursor:
            cursor.execute(
                f'CREATE TABLE "{table}" '
                f'("{column}" smallserial NOT NULL PRIMARY KEY)'
            )
        try:
            old_field = SerialAutoField._meta.get_field("id")
            new_field = BigAutoField(primary_key=True)
            new_field.model = SerialAutoField
            new_field.set_attributes_from_name("id")
            with connection.schema_editor() as editor:
                editor.alter_field(SerialAutoField, old_field, new_field, strict=True)
            sequence_name = f"{table}_{column}_seq"
            with connection.cursor() as cursor:
                cursor.execute(
                    "SELECT data_type FROM pg_sequences WHERE sequencename = %s",
                    [sequence_name],
                )
                row = cursor.fetchone()
                sequence_data_type = row[0] if row and row[0] else None
            # The backing sequence follows the field's new type.
            self.assertEqual(sequence_data_type, "bigint")
            # Rename the column.
            old_field = new_field
            new_field = AutoField(primary_key=True)
            new_field.model = SerialAutoField
            new_field.set_attributes_from_name("renamed_id")
            with connection.schema_editor() as editor:
                editor.alter_field(SerialAutoField, old_field, new_field, strict=True)
            with connection.cursor() as cursor:
                cursor.execute(
                    "SELECT data_type FROM pg_sequences WHERE sequencename = %s",
                    [sequence_name],
                )
                row = cursor.fetchone()
                sequence_data_type = row[0] if row and row[0] else None
            self.assertEqual(sequence_data_type, "integer")
        finally:
            with connection.cursor() as cursor:
                cursor.execute(f'DROP TABLE "{table}"')

    def test_alter_int_pk_to_int_unique(self):
        """
        Should be able to rename an IntegerField(primary_key=True) to
        IntegerField(unique=True).
        """
        with connection.schema_editor() as editor:
            editor.create_model(IntegerPK)
        # Delete the old PK
        old_field = IntegerPK._meta.get_field("i")
        new_field = IntegerField(unique=True)
        new_field.model = IntegerPK
        new_field.set_attributes_from_name("i")
        with connection.schema_editor() as editor:
            editor.alter_field(IntegerPK, old_field, new_field, strict=True)
        # The primary key constraint is gone. Result depends on database:
        # 'id' for SQLite, None for others (must not be 'i').
        self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ("id", None))

        # Set up a model class as it currently stands. The original IntegerPK
        # class is now out of date and some backends make use of the whole
        # model class when modifying a field (such as sqlite3 when remaking a
        # table) so an outdated model class leads to incorrect results.
+ class Transitional(Model): + i = IntegerField(unique=True) + j = IntegerField(unique=True) + + class Meta: + app_label = "schema" + apps = new_apps + db_table = "INTEGERPK" + + # model requires a new PK + old_field = Transitional._meta.get_field("j") + new_field = IntegerField(primary_key=True) + new_field.model = Transitional + new_field.set_attributes_from_name("j") + + with connection.schema_editor() as editor: + editor.alter_field(Transitional, old_field, new_field, strict=True) + + # Create a model class representing the updated model. + class IntegerUnique(Model): + i = IntegerField(unique=True) + j = IntegerField(primary_key=True) + + class Meta: + app_label = "schema" + apps = new_apps + db_table = "INTEGERPK" + + # Ensure unique constraint works. + IntegerUnique.objects.create(i=1, j=1) + with self.assertRaises(IntegrityError): + IntegerUnique.objects.create(i=1, j=2) + + def test_rename(self): + """ + Tests simple altering of fields + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure the field is right to begin with + columns = self.column_classes(Author) + self.assertEqual( + columns["name"][0], + connection.features.introspected_field_types["CharField"], + ) + self.assertNotIn("display_name", columns) + # Alter the name field's name + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=254) + new_field.set_attributes_from_name("display_name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + columns = self.column_classes(Author) + self.assertEqual( + columns["display_name"][0], + connection.features.introspected_field_types["CharField"], + ) + self.assertNotIn("name", columns) + + @isolate_apps("schema") + def test_rename_referenced_field(self): + class Author(Model): + name = CharField(max_length=255, unique=True) + + class Meta: + app_label = "schema" + + class Book(Model): + author = ForeignKey(Author, 
CASCADE, to_field="name") + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + new_field = CharField(max_length=255, unique=True) + new_field.set_attributes_from_name("renamed") + with connection.schema_editor( + atomic=connection.features.supports_atomic_references_rename + ) as editor: + editor.alter_field(Author, Author._meta.get_field("name"), new_field) + # Ensure the foreign key reference was updated. + self.assertForeignKeyExists(Book, "author_id", "schema_author", "renamed") + + @skipIfDBFeature("interprets_empty_strings_as_nulls") + def test_rename_keep_null_status(self): + """ + Renaming a field shouldn't affect the not null status. + """ + with connection.schema_editor() as editor: + editor.create_model(Note) + with self.assertRaises(IntegrityError): + Note.objects.create(info=None) + old_field = Note._meta.get_field("info") + new_field = TextField() + new_field.set_attributes_from_name("detail_info") + with connection.schema_editor() as editor: + editor.alter_field(Note, old_field, new_field, strict=True) + columns = self.column_classes(Note) + self.assertEqual(columns["detail_info"][0], "TextField") + self.assertNotIn("info", columns) + with self.assertRaises(IntegrityError): + NoteRename.objects.create(detail_info=None) + + @isolate_apps("schema") + def test_rename_keep_db_default(self): + """Renaming a field shouldn't affect a database default.""" + + class AuthorDbDefault(Model): + birth_year = IntegerField(db_default=1985) + + class Meta: + app_label = "schema" + + self.isolated_local_models = [AuthorDbDefault] + with connection.schema_editor() as editor: + editor.create_model(AuthorDbDefault) + columns = self.column_classes(AuthorDbDefault) + self.assertEqual(columns["birth_year"][1].default, "1985") + + old_field = AuthorDbDefault._meta.get_field("birth_year") + new_field = IntegerField(db_default=1985) + new_field.set_attributes_from_name("renamed_year") 
+ new_field.model = AuthorDbDefault + with connection.schema_editor( + atomic=connection.features.supports_atomic_references_rename + ) as editor: + editor.alter_field(AuthorDbDefault, old_field, new_field, strict=True) + columns = self.column_classes(AuthorDbDefault) + self.assertEqual(columns["renamed_year"][1].default, "1985") + + @skipUnlessDBFeature( + "supports_column_check_constraints", "can_introspect_check_constraints" + ) + @isolate_apps("schema") + def test_rename_field_with_check_to_truncated_name(self): + class AuthorWithLongColumn(Model): + field_with_very_looooooong_name = PositiveIntegerField(null=True) + + class Meta: + app_label = "schema" + + self.isolated_local_models = [AuthorWithLongColumn] + with connection.schema_editor() as editor: + editor.create_model(AuthorWithLongColumn) + old_field = AuthorWithLongColumn._meta.get_field( + "field_with_very_looooooong_name" + ) + new_field = PositiveIntegerField(null=True) + new_field.set_attributes_from_name("renamed_field_with_very_long_name") + with connection.schema_editor() as editor: + editor.alter_field(AuthorWithLongColumn, old_field, new_field, strict=True) + + new_field_name = truncate_name( + new_field.column, connection.ops.max_name_length() + ) + constraints = self.get_constraints(AuthorWithLongColumn._meta.db_table) + check_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == [new_field_name] and details["check"] + ] + self.assertEqual(len(check_constraints), 1) + + def _test_m2m_create(self, M2MFieldClass): + """ + Tests M2M fields on models during creation + """ + + class LocalBookWithM2M(Model): + author = ForeignKey(Author, CASCADE) + title = CharField(max_length=100, db_index=True) + pub_date = DateTimeField() + tags = M2MFieldClass("TagM2MTest", related_name="books") + + class Meta: + app_label = "schema" + apps = new_apps + + self.local_models = [LocalBookWithM2M] + # Create the tables + with connection.schema_editor() as editor: + 
editor.create_model(Author) + editor.create_model(TagM2MTest) + editor.create_model(LocalBookWithM2M) + # Ensure there is now an m2m table there + columns = self.column_classes( + LocalBookWithM2M._meta.get_field("tags").remote_field.through + ) + self.assertEqual( + columns["tagm2mtest_id"][0], + connection.features.introspected_field_types["IntegerField"], + ) + + def test_m2m_create(self): + self._test_m2m_create(ManyToManyField) + + def test_m2m_create_custom(self): + self._test_m2m_create(CustomManyToManyField) + + def test_m2m_create_inherited(self): + self._test_m2m_create(InheritedManyToManyField) + + def _test_m2m_create_through(self, M2MFieldClass): + """ + Tests M2M fields on models during creation with through models + """ + + class LocalTagThrough(Model): + book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE) + tag = ForeignKey("schema.TagM2MTest", CASCADE) + + class Meta: + app_label = "schema" + apps = new_apps + + class LocalBookWithM2MThrough(Model): + tags = M2MFieldClass( + "TagM2MTest", related_name="books", through=LocalTagThrough + ) + + class Meta: + app_label = "schema" + apps = new_apps + + self.local_models = [LocalTagThrough, LocalBookWithM2MThrough] + + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(LocalTagThrough) + editor.create_model(TagM2MTest) + editor.create_model(LocalBookWithM2MThrough) + # Ensure there is now an m2m table there + columns = self.column_classes(LocalTagThrough) + self.assertEqual( + columns["book_id"][0], + connection.features.introspected_field_types["IntegerField"], + ) + self.assertEqual( + columns["tag_id"][0], + connection.features.introspected_field_types["IntegerField"], + ) + + def test_m2m_create_through(self): + self._test_m2m_create_through(ManyToManyField) + + def test_m2m_create_through_custom(self): + self._test_m2m_create_through(CustomManyToManyField) + + def test_m2m_create_through_inherited(self): + 
self._test_m2m_create_through(InheritedManyToManyField) + + def test_m2m_through_remove(self): + class LocalAuthorNoteThrough(Model): + book = ForeignKey("schema.Author", CASCADE) + tag = ForeignKey("self", CASCADE) + + class Meta: + app_label = "schema" + apps = new_apps + + class LocalNoteWithM2MThrough(Model): + authors = ManyToManyField("schema.Author", through=LocalAuthorNoteThrough) + + class Meta: + app_label = "schema" + apps = new_apps + + self.local_models = [LocalAuthorNoteThrough, LocalNoteWithM2MThrough] + # Create the tables. + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(LocalAuthorNoteThrough) + editor.create_model(LocalNoteWithM2MThrough) + # Remove the through parameter. + old_field = LocalNoteWithM2MThrough._meta.get_field("authors") + new_field = ManyToManyField("Author") + new_field.set_attributes_from_name("authors") + msg = ( + f"Cannot alter field {old_field} into {new_field} - they are not " + f"compatible types (you cannot alter to or from M2M fields, or add or " + f"remove through= on M2M fields)" + ) + with connection.schema_editor() as editor: + with self.assertRaisesMessage(ValueError, msg): + editor.alter_field(LocalNoteWithM2MThrough, old_field, new_field) + + def _test_m2m(self, M2MFieldClass): + """ + Tests adding/removing M2M fields on models + """ + + class LocalAuthorWithM2M(Model): + name = CharField(max_length=255) + + class Meta: + app_label = "schema" + apps = new_apps + + self.local_models = [LocalAuthorWithM2M] + + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(LocalAuthorWithM2M) + editor.create_model(TagM2MTest) + # Create an M2M field + new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors") + new_field.contribute_to_class(LocalAuthorWithM2M, "tags") + # Ensure there's no m2m table there + with self.assertRaises(DatabaseError): + self.column_classes(new_field.remote_field.through) + # Add the field + with 
CaptureQueriesContext( + connection + ) as ctx, connection.schema_editor() as editor: + editor.add_field(LocalAuthorWithM2M, new_field) + # Table is not rebuilt. + self.assertEqual( + len( + [ + query["sql"] + for query in ctx.captured_queries + if "CREATE TABLE" in query["sql"] + ] + ), + 1, + ) + self.assertIs( + any("DROP TABLE" in query["sql"] for query in ctx.captured_queries), + False, + ) + # Ensure there is now an m2m table there + columns = self.column_classes(new_field.remote_field.through) + self.assertEqual( + columns["tagm2mtest_id"][0], + connection.features.introspected_field_types["IntegerField"], + ) + + # "Alter" the field. This should not rename the DB table to itself. + with connection.schema_editor() as editor: + editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True) + + # Remove the M2M table again + with connection.schema_editor() as editor: + editor.remove_field(LocalAuthorWithM2M, new_field) + # Ensure there's no m2m table there + with self.assertRaises(DatabaseError): + self.column_classes(new_field.remote_field.through) + + # Make sure the model state is coherent with the table one now that + # we've removed the tags field. 
+ opts = LocalAuthorWithM2M._meta + opts.local_many_to_many.remove(new_field) + del new_apps.all_models["schema"][ + new_field.remote_field.through._meta.model_name + ] + opts._expire_cache() + + def test_m2m(self): + self._test_m2m(ManyToManyField) + + def test_m2m_custom(self): + self._test_m2m(CustomManyToManyField) + + def test_m2m_inherited(self): + self._test_m2m(InheritedManyToManyField) + + def _test_m2m_through_alter(self, M2MFieldClass): + """ + Tests altering M2Ms with explicit through models (should no-op) + """ + + class LocalAuthorTag(Model): + author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE) + tag = ForeignKey("schema.TagM2MTest", CASCADE) + + class Meta: + app_label = "schema" + apps = new_apps + + class LocalAuthorWithM2MThrough(Model): + name = CharField(max_length=255) + tags = M2MFieldClass( + "schema.TagM2MTest", related_name="authors", through=LocalAuthorTag + ) + + class Meta: + app_label = "schema" + apps = new_apps + + self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough] + + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(LocalAuthorTag) + editor.create_model(LocalAuthorWithM2MThrough) + editor.create_model(TagM2MTest) + # Ensure the m2m table is there + self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) + # "Alter" the field's blankness. This should not actually do anything. 
+ old_field = LocalAuthorWithM2MThrough._meta.get_field("tags") + new_field = M2MFieldClass( + "schema.TagM2MTest", related_name="authors", through=LocalAuthorTag + ) + new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags") + with connection.schema_editor() as editor: + editor.alter_field( + LocalAuthorWithM2MThrough, old_field, new_field, strict=True + ) + # Ensure the m2m table is still there + self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) + + def test_m2m_through_alter(self): + self._test_m2m_through_alter(ManyToManyField) + + def test_m2m_through_alter_custom(self): + self._test_m2m_through_alter(CustomManyToManyField) + + def test_m2m_through_alter_inherited(self): + self._test_m2m_through_alter(InheritedManyToManyField) + + def _test_m2m_repoint(self, M2MFieldClass): + """ + Tests repointing M2M fields + """ + + class LocalBookWithM2M(Model): + author = ForeignKey(Author, CASCADE) + title = CharField(max_length=100, db_index=True) + pub_date = DateTimeField() + tags = M2MFieldClass("TagM2MTest", related_name="books") + + class Meta: + app_label = "schema" + apps = new_apps + + self.local_models = [LocalBookWithM2M] + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(LocalBookWithM2M) + editor.create_model(TagM2MTest) + editor.create_model(UniqueTest) + # Ensure the M2M exists and points to TagM2MTest + if connection.features.supports_foreign_keys: + self.assertForeignKeyExists( + LocalBookWithM2M._meta.get_field("tags").remote_field.through, + "tagm2mtest_id", + "schema_tagm2mtest", + ) + # Repoint the M2M + old_field = LocalBookWithM2M._meta.get_field("tags") + new_field = M2MFieldClass(UniqueTest) + new_field.contribute_to_class(LocalBookWithM2M, "uniques") + with connection.schema_editor() as editor: + editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True) + # Ensure old M2M is gone + with self.assertRaises(DatabaseError): + self.column_classes( + 
LocalBookWithM2M._meta.get_field("tags").remote_field.through + ) + + # This model looks like the new model and is used for teardown. + opts = LocalBookWithM2M._meta + opts.local_many_to_many.remove(old_field) + # Ensure the new M2M exists and points to UniqueTest + if connection.features.supports_foreign_keys: + self.assertForeignKeyExists( + new_field.remote_field.through, "uniquetest_id", "schema_uniquetest" + ) + + def test_m2m_repoint(self): + self._test_m2m_repoint(ManyToManyField) + + def test_m2m_repoint_custom(self): + self._test_m2m_repoint(CustomManyToManyField) + + def test_m2m_repoint_inherited(self): + self._test_m2m_repoint(InheritedManyToManyField) + + @isolate_apps("schema") + def test_m2m_rename_field_in_target_model(self): + class LocalTagM2MTest(Model): + title = CharField(max_length=255) + + class Meta: + app_label = "schema" + + class LocalM2M(Model): + tags = ManyToManyField(LocalTagM2MTest) + + class Meta: + app_label = "schema" + + # Create the tables. + with connection.schema_editor() as editor: + editor.create_model(LocalM2M) + editor.create_model(LocalTagM2MTest) + self.isolated_local_models = [LocalM2M, LocalTagM2MTest] + # Ensure the m2m table is there. + self.assertEqual(len(self.column_classes(LocalM2M)), 1) + # Alter a field in LocalTagM2MTest. + old_field = LocalTagM2MTest._meta.get_field("title") + new_field = CharField(max_length=254) + new_field.contribute_to_class(LocalTagM2MTest, "title1") + # @isolate_apps() and inner models are needed to have the model + # relations populated, otherwise this doesn't act as a regression test. + self.assertEqual(len(new_field.model._meta.related_objects), 1) + with connection.schema_editor() as editor: + editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True) + # Ensure the m2m table is still there. 
+ self.assertEqual(len(self.column_classes(LocalM2M)), 1) + + @skipUnlessDBFeature( + "supports_column_check_constraints", "can_introspect_check_constraints" + ) + def test_check_constraints(self): + """ + Tests creating/deleting CHECK constraints + """ + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure the constraint exists + constraints = self.get_constraints(Author._meta.db_table) + if not any( + details["columns"] == ["height"] and details["check"] + for details in constraints.values() + ): + self.fail("No check constraint for height found") + # Alter the column to remove it + old_field = Author._meta.get_field("height") + new_field = IntegerField(null=True, blank=True) + new_field.set_attributes_from_name("height") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + constraints = self.get_constraints(Author._meta.db_table) + for details in constraints.values(): + if details["columns"] == ["height"] and details["check"]: + self.fail("Check constraint for height found") + # Alter the column to re-add it + new_field2 = Author._meta.get_field("height") + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, new_field2, strict=True) + constraints = self.get_constraints(Author._meta.db_table) + if not any( + details["columns"] == ["height"] and details["check"] + for details in constraints.values() + ): + self.fail("No check constraint for height found") + + @skipUnlessDBFeature( + "supports_column_check_constraints", "can_introspect_check_constraints" + ) + @isolate_apps("schema") + def test_check_constraint_timedelta_param(self): + class DurationModel(Model): + duration = DurationField() + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(DurationModel) + self.isolated_local_models = [DurationModel] + constraint_name = "duration_gte_5_minutes" + constraint = 
CheckConstraint( + check=Q(duration__gt=datetime.timedelta(minutes=5)), + name=constraint_name, + ) + DurationModel._meta.constraints = [constraint] + with connection.schema_editor() as editor: + editor.add_constraint(DurationModel, constraint) + constraints = self.get_constraints(DurationModel._meta.db_table) + self.assertIn(constraint_name, constraints) + with self.assertRaises(IntegrityError), atomic(): + DurationModel.objects.create(duration=datetime.timedelta(minutes=4)) + DurationModel.objects.create(duration=datetime.timedelta(minutes=10)) + + @skipUnlessDBFeature( + "supports_column_check_constraints", "can_introspect_check_constraints" + ) + def test_remove_field_check_does_not_remove_meta_constraints(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + # Add the custom check constraint + constraint = CheckConstraint( + check=Q(height__gte=0), name="author_height_gte_0_check" + ) + custom_constraint_name = constraint.name + Author._meta.constraints = [constraint] + with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + # Ensure the constraints exist + constraints = self.get_constraints(Author._meta.db_table) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["height"] + and details["check"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 1) + # Alter the column to remove field check + old_field = Author._meta.get_field("height") + new_field = IntegerField(null=True, blank=True) + new_field.set_attributes_from_name("height") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + constraints = self.get_constraints(Author._meta.db_table) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["height"] + 
and details["check"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 0) + # Alter the column to re-add field check + new_field2 = Author._meta.get_field("height") + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, new_field2, strict=True) + constraints = self.get_constraints(Author._meta.db_table) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["height"] + and details["check"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 1) + # Drop the check constraint + with connection.schema_editor() as editor: + Author._meta.constraints = [] + editor.remove_constraint(Author, constraint) + + def test_unique(self): + """ + Tests removing and adding unique constraints to a single column. + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Tag) + # Ensure the field is unique to begin with + Tag.objects.create(title="foo", slug="foo") + with self.assertRaises(IntegrityError): + Tag.objects.create(title="bar", slug="foo") + Tag.objects.all().delete() + # Alter the slug field to be non-unique + old_field = Tag._meta.get_field("slug") + new_field = SlugField(unique=False) + new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field(Tag, old_field, new_field, strict=True) + # Ensure the field is no longer unique + Tag.objects.create(title="foo", slug="foo") + Tag.objects.create(title="bar", slug="foo") + Tag.objects.all().delete() + # Alter the slug field to be unique + new_field2 = SlugField(unique=True) + new_field2.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field(Tag, new_field, new_field2, strict=True) + # Ensure the field is unique again + Tag.objects.create(title="foo", slug="foo") + with self.assertRaises(IntegrityError): + 
Tag.objects.create(title="bar", slug="foo") + Tag.objects.all().delete() + # Rename the field + new_field3 = SlugField(unique=True) + new_field3.set_attributes_from_name("slug2") + with connection.schema_editor() as editor: + editor.alter_field(Tag, new_field2, new_field3, strict=True) + # Ensure the field is still unique + TagUniqueRename.objects.create(title="foo", slug2="foo") + with self.assertRaises(IntegrityError): + TagUniqueRename.objects.create(title="bar", slug2="foo") + Tag.objects.all().delete() + + def test_unique_name_quoting(self): + old_table_name = TagUniqueRename._meta.db_table + try: + with connection.schema_editor() as editor: + editor.create_model(TagUniqueRename) + editor.alter_db_table(TagUniqueRename, old_table_name, "unique-table") + TagUniqueRename._meta.db_table = "unique-table" + # This fails if the unique index name isn't quoted. + editor.alter_unique_together(TagUniqueRename, [], (("title", "slug2"),)) + finally: + with connection.schema_editor() as editor: + editor.delete_model(TagUniqueRename) + TagUniqueRename._meta.db_table = old_table_name + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_foreign_keys") + def test_unique_no_unnecessary_fk_drops(self): + """ + If AlterField isn't selective about dropping foreign key constraints + when modifying a field with a unique constraint, the AlterField + incorrectly drops and recreates the Book.author foreign key even though + it doesn't restrict the field being changed (#29193). 
+ """ + + class Author(Model): + name = CharField(max_length=254, unique=True) + + class Meta: + app_label = "schema" + + class Book(Model): + author = ForeignKey(Author, CASCADE) + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + new_field = CharField(max_length=255, unique=True) + new_field.model = Author + new_field.set_attributes_from_name("name") + with self.assertLogs("django.db.backends.schema", "DEBUG") as cm: + with connection.schema_editor() as editor: + editor.alter_field(Author, Author._meta.get_field("name"), new_field) + # One SQL statement is executed to alter the field. + self.assertEqual(len(cm.records), 1) + + @isolate_apps("schema") + def test_unique_and_reverse_m2m(self): + """ + AlterField can modify a unique field when there's a reverse M2M + relation on the model. + """ + + class Tag(Model): + title = CharField(max_length=255) + slug = SlugField(unique=True) + + class Meta: + app_label = "schema" + + class Book(Model): + tags = ManyToManyField(Tag, related_name="books") + + class Meta: + app_label = "schema" + + self.isolated_local_models = [Book._meta.get_field("tags").remote_field.through] + with connection.schema_editor() as editor: + editor.create_model(Tag) + editor.create_model(Book) + new_field = SlugField(max_length=75, unique=True) + new_field.model = Tag + new_field.set_attributes_from_name("slug") + with self.assertLogs("django.db.backends.schema", "DEBUG") as cm: + with connection.schema_editor() as editor: + editor.alter_field(Tag, Tag._meta.get_field("slug"), new_field) + # One SQL statement is executed to alter the field. + self.assertEqual(len(cm.records), 1) + # Ensure that the field is still unique. 
+ Tag.objects.create(title="foo", slug="foo") + with self.assertRaises(IntegrityError): + Tag.objects.create(title="bar", slug="foo") + + def test_remove_ignored_unique_constraint_not_create_fk_index(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + constraint = UniqueConstraint( + "author", + condition=Q(title__in=["tHGttG", "tRatEotU"]), + name="book_author_condition_uniq", + ) + # Add unique constraint. + with connection.schema_editor() as editor: + editor.add_constraint(Book, constraint) + old_constraints = self.get_constraints_for_column( + Book, + Book._meta.get_field("author").column, + ) + # Remove unique constraint. + with connection.schema_editor() as editor: + editor.remove_constraint(Book, constraint) + new_constraints = self.get_constraints_for_column( + Book, + Book._meta.get_field("author").column, + ) + # Redundant foreign key index is not added. + self.assertEqual( + len(old_constraints) - 1 + if connection.features.supports_partial_indexes + else len(old_constraints), + len(new_constraints), + ) + + @skipUnlessDBFeature("allows_multiple_constraints_on_same_fields") + def test_remove_field_unique_does_not_remove_meta_constraints(self): + with connection.schema_editor() as editor: + editor.create_model(AuthorWithUniqueName) + self.local_models = [AuthorWithUniqueName] + # Add the custom unique constraint + constraint = UniqueConstraint(fields=["name"], name="author_name_uniq") + custom_constraint_name = constraint.name + AuthorWithUniqueName._meta.constraints = [constraint] + with connection.schema_editor() as editor: + editor.add_constraint(AuthorWithUniqueName, constraint) + # Ensure the constraints exist + constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name"] + and details["unique"] + and name != 
custom_constraint_name + ] + self.assertEqual(len(other_constraints), 1) + # Alter the column to remove field uniqueness + old_field = AuthorWithUniqueName._meta.get_field("name") + new_field = CharField(max_length=255) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True) + constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name"] + and details["unique"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 0) + # Alter the column to re-add field uniqueness + new_field2 = AuthorWithUniqueName._meta.get_field("name") + with connection.schema_editor() as editor: + editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True) + constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name"] + and details["unique"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 1) + # Drop the unique constraint + with connection.schema_editor() as editor: + AuthorWithUniqueName._meta.constraints = [] + editor.remove_constraint(AuthorWithUniqueName, constraint) + + def test_unique_together(self): + """ + Tests removing and adding unique_together constraints on a model. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(UniqueTest) + # Ensure the fields are unique to begin with + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.create(year=2011, slug="foo") + UniqueTest.objects.create(year=2011, slug="bar") + with self.assertRaises(IntegrityError): + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.all().delete() + # Alter the model to its non-unique-together companion + with connection.schema_editor() as editor: + editor.alter_unique_together( + UniqueTest, UniqueTest._meta.unique_together, [] + ) + # Ensure the fields are no longer unique + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.all().delete() + # Alter it back + new_field2 = SlugField(unique=True) + new_field2.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_unique_together( + UniqueTest, [], UniqueTest._meta.unique_together + ) + # Ensure the fields are unique again + UniqueTest.objects.create(year=2012, slug="foo") + with self.assertRaises(IntegrityError): + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.all().delete() + + def test_unique_together_with_fk(self): + """ + Tests removing and adding unique_together constraints that include + a foreign key. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + # Ensure the fields are unique to begin with + self.assertEqual(Book._meta.unique_together, ()) + # Add the unique_together constraint + with connection.schema_editor() as editor: + editor.alter_unique_together(Book, [], [["author", "title"]]) + # Alter it back + with connection.schema_editor() as editor: + editor.alter_unique_together(Book, [["author", "title"]], []) + + def test_unique_together_with_fk_with_existing_index(self): + """ + Tests removing and adding unique_together constraints that include + a foreign key, where the foreign key is added after the model is + created. + """ + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(BookWithoutAuthor) + new_field = ForeignKey(Author, CASCADE) + new_field.set_attributes_from_name("author") + editor.add_field(BookWithoutAuthor, new_field) + # Ensure the fields aren't unique to begin with + self.assertEqual(Book._meta.unique_together, ()) + # Add the unique_together constraint + with connection.schema_editor() as editor: + editor.alter_unique_together(Book, [], [["author", "title"]]) + # Alter it back + with connection.schema_editor() as editor: + editor.alter_unique_together(Book, [["author", "title"]], []) + + def _test_composed_index_with_fk(self, index): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + table = Book._meta.db_table + self.assertEqual(Book._meta.indexes, []) + Book._meta.indexes = [index] + with connection.schema_editor() as editor: + editor.add_index(Book, index) + self.assertIn(index.name, self.get_constraints(table)) + Book._meta.indexes = [] + with connection.schema_editor() as editor: + editor.remove_index(Book, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + def test_composed_index_with_fk(self): + index = 
Index(fields=["author", "title"], name="book_author_title_idx") + self._test_composed_index_with_fk(index) + + def test_composed_desc_index_with_fk(self): + index = Index(fields=["-author", "title"], name="book_author_title_idx") + self._test_composed_index_with_fk(index) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_composed_func_index_with_fk(self): + index = Index(F("author"), F("title"), name="book_author_title_idx") + self._test_composed_index_with_fk(index) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_composed_desc_func_index_with_fk(self): + index = Index(F("author").desc(), F("title"), name="book_author_title_idx") + self._test_composed_index_with_fk(index) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_composed_func_transform_index_with_fk(self): + index = Index(F("title__lower"), name="book_title_lower_idx") + with register_lookup(CharField, Lower): + self._test_composed_index_with_fk(index) + + def _test_composed_constraint_with_fk(self, constraint): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + table = Book._meta.db_table + self.assertEqual(Book._meta.constraints, []) + Book._meta.constraints = [constraint] + with connection.schema_editor() as editor: + editor.add_constraint(Book, constraint) + self.assertIn(constraint.name, self.get_constraints(table)) + Book._meta.constraints = [] + with connection.schema_editor() as editor: + editor.remove_constraint(Book, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + def test_composed_constraint_with_fk(self): + constraint = UniqueConstraint( + fields=["author", "title"], + name="book_author_title_uniq", + ) + self._test_composed_constraint_with_fk(constraint) + + @skipUnlessDBFeature( + "supports_column_check_constraints", "can_introspect_check_constraints" + ) + def test_composed_check_constraint_with_fk(self): + constraint = 
CheckConstraint(check=Q(author__gt=0), name="book_author_check") + self._test_composed_constraint_with_fk(constraint) + + @skipUnlessDBFeature("allows_multiple_constraints_on_same_fields") + def test_remove_unique_together_does_not_remove_meta_constraints(self): + with connection.schema_editor() as editor: + editor.create_model(AuthorWithUniqueNameAndBirthday) + self.local_models = [AuthorWithUniqueNameAndBirthday] + # Add the custom unique constraint + constraint = UniqueConstraint( + fields=["name", "birthday"], name="author_name_birthday_uniq" + ) + custom_constraint_name = constraint.name + AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint] + with connection.schema_editor() as editor: + editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint) + # Ensure the constraints exist + constraints = self.get_constraints( + AuthorWithUniqueNameAndBirthday._meta.db_table + ) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name", "birthday"] + and details["unique"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 1) + # Remove unique together + unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together + with connection.schema_editor() as editor: + editor.alter_unique_together( + AuthorWithUniqueNameAndBirthday, unique_together, [] + ) + constraints = self.get_constraints( + AuthorWithUniqueNameAndBirthday._meta.db_table + ) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name", "birthday"] + and details["unique"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 0) + # Re-add unique together + with connection.schema_editor() as editor: + editor.alter_unique_together( + AuthorWithUniqueNameAndBirthday, [], unique_together + ) + constraints = 
self.get_constraints( + AuthorWithUniqueNameAndBirthday._meta.db_table + ) + self.assertIn(custom_constraint_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name", "birthday"] + and details["unique"] + and name != custom_constraint_name + ] + self.assertEqual(len(other_constraints), 1) + # Drop the unique constraint + with connection.schema_editor() as editor: + AuthorWithUniqueNameAndBirthday._meta.constraints = [] + editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint) + + def test_unique_constraint(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + constraint = UniqueConstraint(fields=["name"], name="name_uq") + # Add constraint. + with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + sql = constraint.create_sql(Author, editor) + table = Author._meta.db_table + self.assertIs(sql.references_table(table), True) + self.assertIs(sql.references_column(table, "name"), True) + # Remove constraint. + with connection.schema_editor() as editor: + editor.remove_constraint(Author, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_unique_constraint(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + constraint = UniqueConstraint(Upper("name").desc(), name="func_upper_uq") + # Add constraint. + with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + sql = constraint.create_sql(Author, editor) + table = Author._meta.db_table + constraints = self.get_constraints(table) + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(table, constraint.name, ["DESC"]) + self.assertIn(constraint.name, constraints) + self.assertIs(constraints[constraint.name]["unique"], True) + # SQL contains a database function. 
+ self.assertIs(sql.references_column(table, "name"), True) + self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql)) + # Remove constraint. + with connection.schema_editor() as editor: + editor.remove_constraint(Author, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_composite_func_unique_constraint(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(BookWithSlug) + constraint = UniqueConstraint( + Upper("title"), + Lower("slug"), + name="func_upper_lower_unq", + ) + # Add constraint. + with connection.schema_editor() as editor: + editor.add_constraint(BookWithSlug, constraint) + sql = constraint.create_sql(BookWithSlug, editor) + table = BookWithSlug._meta.db_table + constraints = self.get_constraints(table) + self.assertIn(constraint.name, constraints) + self.assertIs(constraints[constraint.name]["unique"], True) + # SQL contains database functions. + self.assertIs(sql.references_column(table, "title"), True) + self.assertIs(sql.references_column(table, "slug"), True) + sql = str(sql) + self.assertIn("UPPER(%s)" % editor.quote_name("title"), sql) + self.assertIn("LOWER(%s)" % editor.quote_name("slug"), sql) + self.assertLess(sql.index("UPPER"), sql.index("LOWER")) + # Remove constraint. + with connection.schema_editor() as editor: + editor.remove_constraint(BookWithSlug, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_unique_constraint_field_and_expression(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + constraint = UniqueConstraint( + F("height").desc(), + "uuid", + Lower("name").asc(), + name="func_f_lower_field_unq", + ) + # Add constraint. 
+ with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + sql = constraint.create_sql(Author, editor) + table = Author._meta.db_table + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(table, constraint.name, ["DESC", "ASC", "ASC"]) + constraints = self.get_constraints(table) + self.assertIs(constraints[constraint.name]["unique"], True) + self.assertEqual(len(constraints[constraint.name]["columns"]), 3) + self.assertEqual(constraints[constraint.name]["columns"][1], "uuid") + # SQL contains database functions and columns. + self.assertIs(sql.references_column(table, "height"), True) + self.assertIs(sql.references_column(table, "name"), True) + self.assertIs(sql.references_column(table, "uuid"), True) + self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql)) + # Remove constraint. + with connection.schema_editor() as editor: + editor.remove_constraint(Author, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes", "supports_partial_indexes") + def test_func_unique_constraint_partial(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + constraint = UniqueConstraint( + Upper("name"), + name="func_upper_cond_weight_uq", + condition=Q(weight__isnull=False), + ) + # Add constraint. + with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + sql = constraint.create_sql(Author, editor) + table = Author._meta.db_table + constraints = self.get_constraints(table) + self.assertIn(constraint.name, constraints) + self.assertIs(constraints[constraint.name]["unique"], True) + self.assertIs(sql.references_column(table, "name"), True) + self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql)) + self.assertIn( + "WHERE %s IS NOT NULL" % editor.quote_name("weight"), + str(sql), + ) + # Remove constraint. 
+ with connection.schema_editor() as editor: + editor.remove_constraint(Author, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes", "supports_covering_indexes") + def test_func_unique_constraint_covering(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + constraint = UniqueConstraint( + Upper("name"), + name="func_upper_covering_uq", + include=["weight", "height"], + ) + # Add constraint. + with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + sql = constraint.create_sql(Author, editor) + table = Author._meta.db_table + constraints = self.get_constraints(table) + self.assertIn(constraint.name, constraints) + self.assertIs(constraints[constraint.name]["unique"], True) + self.assertEqual( + constraints[constraint.name]["columns"], + [None, "weight", "height"], + ) + self.assertIs(sql.references_column(table, "name"), True) + self.assertIs(sql.references_column(table, "weight"), True) + self.assertIs(sql.references_column(table, "height"), True) + self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql)) + self.assertIn( + "INCLUDE (%s, %s)" + % ( + editor.quote_name("weight"), + editor.quote_name("height"), + ), + str(sql), + ) + # Remove constraint. + with connection.schema_editor() as editor: + editor.remove_constraint(Author, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_unique_constraint_lookups(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): + constraint = UniqueConstraint( + F("name__lower"), + F("weight__abs"), + name="func_lower_abs_lookup_uq", + ) + # Add constraint. 
+ with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + sql = constraint.create_sql(Author, editor) + table = Author._meta.db_table + constraints = self.get_constraints(table) + self.assertIn(constraint.name, constraints) + self.assertIs(constraints[constraint.name]["unique"], True) + # SQL contains columns. + self.assertIs(sql.references_column(table, "name"), True) + self.assertIs(sql.references_column(table, "weight"), True) + # Remove constraint. + with connection.schema_editor() as editor: + editor.remove_constraint(Author, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_unique_constraint_collate(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("This backend does not support case-insensitive collations.") + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(BookWithSlug) + constraint = UniqueConstraint( + Collate(F("title"), collation=collation).desc(), + Collate("slug", collation=collation), + name="func_collate_uq", + ) + # Add constraint. + with connection.schema_editor() as editor: + editor.add_constraint(BookWithSlug, constraint) + sql = constraint.create_sql(BookWithSlug, editor) + table = BookWithSlug._meta.db_table + constraints = self.get_constraints(table) + self.assertIn(constraint.name, constraints) + self.assertIs(constraints[constraint.name]["unique"], True) + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(table, constraint.name, ["DESC", "ASC"]) + # SQL contains columns and a collation. + self.assertIs(sql.references_column(table, "title"), True) + self.assertIs(sql.references_column(table, "slug"), True) + self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql)) + # Remove constraint. 
+ with connection.schema_editor() as editor: + editor.remove_constraint(BookWithSlug, constraint) + self.assertNotIn(constraint.name, self.get_constraints(table)) + + @skipIfDBFeature("supports_expression_indexes") + def test_func_unique_constraint_unsupported(self): + # UniqueConstraint is ignored on databases that don't support indexes on + # expressions. + with connection.schema_editor() as editor: + editor.create_model(Author) + constraint = UniqueConstraint(F("name"), name="func_name_uq") + with connection.schema_editor() as editor, self.assertNumQueries(0): + self.assertIsNone(editor.add_constraint(Author, constraint)) + self.assertIsNone(editor.remove_constraint(Author, constraint)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_unique_constraint_nonexistent_field(self): + constraint = UniqueConstraint(Lower("nonexistent"), name="func_nonexistent_uq") + msg = ( + "Cannot resolve keyword 'nonexistent' into field. Choices are: " + "height, id, name, uuid, weight" + ) + with self.assertRaisesMessage(FieldError, msg): + with connection.schema_editor() as editor: + editor.add_constraint(Author, constraint) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_unique_constraint_nondeterministic(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + constraint = UniqueConstraint(Random(), name="func_random_uq") + with connection.schema_editor() as editor: + with self.assertRaises(DatabaseError): + editor.add_constraint(Author, constraint) + + @ignore_warnings(category=RemovedInDjango51Warning) + def test_index_together(self): + """ + Tests removing and adding index_together constraints on a model. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Tag) + # Ensure there's no index on the year/slug columns first + self.assertIs( + any( + c["index"] + for c in self.get_constraints("schema_tag").values() + if c["columns"] == ["slug", "title"] + ), + False, + ) + # Alter the model to add an index + with connection.schema_editor() as editor: + editor.alter_index_together(Tag, [], [("slug", "title")]) + # Ensure there is now an index + self.assertIs( + any( + c["index"] + for c in self.get_constraints("schema_tag").values() + if c["columns"] == ["slug", "title"] + ), + True, + ) + # Alter it back + new_field2 = SlugField(unique=True) + new_field2.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_index_together(Tag, [("slug", "title")], []) + # Ensure there's no index + self.assertIs( + any( + c["index"] + for c in self.get_constraints("schema_tag").values() + if c["columns"] == ["slug", "title"] + ), + False, + ) + + @ignore_warnings(category=RemovedInDjango51Warning) + def test_index_together_with_fk(self): + """ + Tests removing and adding index_together constraints that include + a foreign key. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + # Ensure the fields are unique to begin with + self.assertEqual(Book._meta.index_together, ()) + # Add the unique_together constraint + with connection.schema_editor() as editor: + editor.alter_index_together(Book, [], [["author", "title"]]) + # Alter it back + with connection.schema_editor() as editor: + editor.alter_index_together(Book, [["author", "title"]], []) + + @ignore_warnings(category=RemovedInDjango51Warning) + @isolate_apps("schema") + def test_create_index_together(self): + """ + Tests creating models with index_together already defined + """ + + class TagIndexed(Model): + title = CharField(max_length=255) + slug = SlugField(unique=True) + + class Meta: + app_label = "schema" + index_together = [["slug", "title"]] + + # Create the table + with connection.schema_editor() as editor: + editor.create_model(TagIndexed) + self.isolated_local_models = [TagIndexed] + # Ensure there is an index + self.assertIs( + any( + c["index"] + for c in self.get_constraints("schema_tagindexed").values() + if c["columns"] == ["slug", "title"] + ), + True, + ) + + @skipUnlessDBFeature("allows_multiple_constraints_on_same_fields") + @ignore_warnings(category=RemovedInDjango51Warning) + @isolate_apps("schema") + def test_remove_index_together_does_not_remove_meta_indexes(self): + class AuthorWithIndexedNameAndBirthday(Model): + name = CharField(max_length=255) + birthday = DateField() + + class Meta: + app_label = "schema" + index_together = [["name", "birthday"]] + + with connection.schema_editor() as editor: + editor.create_model(AuthorWithIndexedNameAndBirthday) + self.isolated_local_models = [AuthorWithIndexedNameAndBirthday] + # Add the custom index + index = Index(fields=["name", "birthday"], name="author_name_birthday_idx") + custom_index_name = index.name + AuthorWithIndexedNameAndBirthday._meta.indexes = [index] + with 
connection.schema_editor() as editor: + editor.add_index(AuthorWithIndexedNameAndBirthday, index) + # Ensure the indexes exist + constraints = self.get_constraints( + AuthorWithIndexedNameAndBirthday._meta.db_table + ) + self.assertIn(custom_index_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name", "birthday"] + and details["index"] + and name != custom_index_name + ] + self.assertEqual(len(other_constraints), 1) + # Remove index together + index_together = AuthorWithIndexedNameAndBirthday._meta.index_together + with connection.schema_editor() as editor: + editor.alter_index_together( + AuthorWithIndexedNameAndBirthday, index_together, [] + ) + constraints = self.get_constraints( + AuthorWithIndexedNameAndBirthday._meta.db_table + ) + self.assertIn(custom_index_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name", "birthday"] + and details["index"] + and name != custom_index_name + ] + self.assertEqual(len(other_constraints), 0) + # Re-add index together + with connection.schema_editor() as editor: + editor.alter_index_together( + AuthorWithIndexedNameAndBirthday, [], index_together + ) + constraints = self.get_constraints( + AuthorWithIndexedNameAndBirthday._meta.db_table + ) + self.assertIn(custom_index_name, constraints) + other_constraints = [ + name + for name, details in constraints.items() + if details["columns"] == ["name", "birthday"] + and details["index"] + and name != custom_index_name + ] + self.assertEqual(len(other_constraints), 1) + # Drop the index + with connection.schema_editor() as editor: + AuthorWithIndexedNameAndBirthday._meta.indexes = [] + editor.remove_index(AuthorWithIndexedNameAndBirthday, index) + + @isolate_apps("schema") + def test_db_table(self): + """ + Tests renaming of the table + """ + + class Author(Model): + name = CharField(max_length=255) + + class Meta: + app_label = "schema" 
+ + class Book(Model): + author = ForeignKey(Author, CASCADE) + + class Meta: + app_label = "schema" + + # Create the table and one referring it. + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + # Ensure the table is there to begin with + columns = self.column_classes(Author) + self.assertEqual( + columns["name"][0], + connection.features.introspected_field_types["CharField"], + ) + # Alter the table + with connection.schema_editor( + atomic=connection.features.supports_atomic_references_rename + ) as editor: + editor.alter_db_table(Author, "schema_author", "schema_otherauthor") + Author._meta.db_table = "schema_otherauthor" + columns = self.column_classes(Author) + self.assertEqual( + columns["name"][0], + connection.features.introspected_field_types["CharField"], + ) + # Ensure the foreign key reference was updated + self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor") + # Alter the table again + with connection.schema_editor( + atomic=connection.features.supports_atomic_references_rename + ) as editor: + editor.alter_db_table(Author, "schema_otherauthor", "schema_author") + # Ensure the table is still there + Author._meta.db_table = "schema_author" + columns = self.column_classes(Author) + self.assertEqual( + columns["name"][0], + connection.features.introspected_field_types["CharField"], + ) + + def test_add_remove_index(self): + """ + Tests index addition and removal + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure the table is there and has no index + self.assertNotIn("title", self.get_indexes(Author._meta.db_table)) + # Add the index + index = Index(fields=["name"], name="author_title_idx") + with connection.schema_editor() as editor: + editor.add_index(Author, index) + self.assertIn("name", self.get_indexes(Author._meta.db_table)) + # Drop the index + with connection.schema_editor() as editor: + editor.remove_index(Author, 
index) + self.assertNotIn("name", self.get_indexes(Author._meta.db_table)) + + def test_remove_db_index_doesnt_remove_custom_indexes(self): + """ + Changing db_index to False doesn't remove indexes from Meta.indexes. + """ + with connection.schema_editor() as editor: + editor.create_model(AuthorWithIndexedName) + self.local_models = [AuthorWithIndexedName] + # Ensure the table has its index + self.assertIn("name", self.get_indexes(AuthorWithIndexedName._meta.db_table)) + + # Add the custom index + index = Index(fields=["-name"], name="author_name_idx") + author_index_name = index.name + with connection.schema_editor() as editor: + db_index_name = editor._create_index_name( + table_name=AuthorWithIndexedName._meta.db_table, + column_names=("name",), + ) + try: + AuthorWithIndexedName._meta.indexes = [index] + with connection.schema_editor() as editor: + editor.add_index(AuthorWithIndexedName, index) + old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) + self.assertIn(author_index_name, old_constraints) + self.assertIn(db_index_name, old_constraints) + # Change name field to db_index=False + old_field = AuthorWithIndexedName._meta.get_field("name") + new_field = CharField(max_length=255) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field( + AuthorWithIndexedName, old_field, new_field, strict=True + ) + new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) + self.assertNotIn(db_index_name, new_constraints) + # The index from Meta.indexes is still in the database. 
+ self.assertIn(author_index_name, new_constraints) + # Drop the index + with connection.schema_editor() as editor: + editor.remove_index(AuthorWithIndexedName, index) + finally: + AuthorWithIndexedName._meta.indexes = [] + + def test_order_index(self): + """ + Indexes defined with ordering (ASC/DESC) defined on column + """ + with connection.schema_editor() as editor: + editor.create_model(Author) + # The table doesn't have an index + self.assertNotIn("title", self.get_indexes(Author._meta.db_table)) + index_name = "author_name_idx" + # Add the index + index = Index(fields=["name", "-weight"], name=index_name) + with connection.schema_editor() as editor: + editor.add_index(Author, index) + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(Author._meta.db_table, index_name, ["ASC", "DESC"]) + # Drop the index + with connection.schema_editor() as editor: + editor.remove_index(Author, index) + + def test_indexes(self): + """ + Tests creation/altering of indexes + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + # Ensure the table is there and has the right index + self.assertIn( + "title", + self.get_indexes(Book._meta.db_table), + ) + # Alter to remove the index + old_field = Book._meta.get_field("title") + new_field = CharField(max_length=100, db_index=False) + new_field.set_attributes_from_name("title") + with connection.schema_editor() as editor: + editor.alter_field(Book, old_field, new_field, strict=True) + # Ensure the table is there and has no index + self.assertNotIn( + "title", + self.get_indexes(Book._meta.db_table), + ) + # Alter to re-add the index + new_field2 = Book._meta.get_field("title") + with connection.schema_editor() as editor: + editor.alter_field(Book, new_field, new_field2, strict=True) + # Ensure the table is there and has the index again + self.assertIn( + "title", + self.get_indexes(Book._meta.db_table), + ) + # Add a unique 
column, verify that creates an implicit index + new_field3 = BookWithSlug._meta.get_field("slug") + with connection.schema_editor() as editor: + editor.add_field(Book, new_field3) + self.assertIn( + "slug", + self.get_uniques(Book._meta.db_table), + ) + # Remove the unique, check the index goes with it + new_field4 = CharField(max_length=20, unique=False) + new_field4.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True) + self.assertNotIn( + "slug", + self.get_uniques(Book._meta.db_table), + ) + + def test_text_field_with_db_index(self): + with connection.schema_editor() as editor: + editor.create_model(AuthorTextFieldWithIndex) + # The text_field index is present if the database supports it. + assertion = ( + self.assertIn + if connection.features.supports_index_on_text_field + else self.assertNotIn + ) + assertion( + "text_field", self.get_indexes(AuthorTextFieldWithIndex._meta.db_table) + ) + + def _index_expressions_wrappers(self): + index_expression = IndexExpression() + index_expression.set_wrapper_classes(connection) + return ", ".join( + [ + wrapper_cls.__qualname__ + for wrapper_cls in index_expression.wrapper_classes + ] + ) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_multiple_wrapper_references(self): + index = Index(OrderBy(F("name").desc(), descending=True), name="name") + msg = ( + "Multiple references to %s can't be used in an indexed expression." + % self._index_expressions_wrappers() + ) + with connection.schema_editor() as editor: + with self.assertRaisesMessage(ValueError, msg): + editor.add_index(Author, index) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_invalid_topmost_expressions(self): + index = Index(Upper(F("name").desc()), name="name") + msg = ( + "%s must be topmost expressions in an indexed expression." 
+ % self._index_expressions_wrappers() + ) + with connection.schema_editor() as editor: + with self.assertRaisesMessage(ValueError, msg): + editor.add_index(Author, index) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + index = Index(Lower("name").desc(), name="func_lower_idx") + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Author, index) + sql = index.create_sql(Author, editor) + table = Author._meta.db_table + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(table, index.name, ["DESC"]) + # SQL contains a database function. + self.assertIs(sql.references_column(table, "name"), True) + self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql)) + # Remove index. + with connection.schema_editor() as editor: + editor.remove_index(Author, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_f(self): + with connection.schema_editor() as editor: + editor.create_model(Tag) + index = Index("slug", F("title").desc(), name="func_f_idx") + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Tag, index) + sql = index.create_sql(Tag, editor) + table = Tag._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(Tag._meta.db_table, index.name, ["ASC", "DESC"]) + # SQL contains columns. + self.assertIs(sql.references_column(table, "slug"), True) + self.assertIs(sql.references_column(table, "title"), True) + # Remove index. 
+ with connection.schema_editor() as editor: + editor.remove_index(Tag, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_lookups(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): + index = Index( + F("name__lower"), + F("weight__abs"), + name="func_lower_abs_lookup_idx", + ) + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Author, index) + sql = index.create_sql(Author, editor) + table = Author._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + # SQL contains columns. + self.assertIs(sql.references_column(table, "name"), True) + self.assertIs(sql.references_column(table, "weight"), True) + # Remove index. + with connection.schema_editor() as editor: + editor.remove_index(Author, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_composite_func_index(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + index = Index(Lower("name"), Upper("name"), name="func_lower_upper_idx") + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Author, index) + sql = index.create_sql(Author, editor) + table = Author._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + # SQL contains database functions. + self.assertIs(sql.references_column(table, "name"), True) + sql = str(sql) + self.assertIn("LOWER(%s)" % editor.quote_name("name"), sql) + self.assertIn("UPPER(%s)" % editor.quote_name("name"), sql) + self.assertLess(sql.index("LOWER"), sql.index("UPPER")) + # Remove index. 
+ with connection.schema_editor() as editor: + editor.remove_index(Author, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_composite_func_index_field_and_expression(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + index = Index( + F("author").desc(), + Lower("title").asc(), + "pub_date", + name="func_f_lower_field_idx", + ) + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Book, index) + sql = index.create_sql(Book, editor) + table = Book._meta.db_table + constraints = self.get_constraints(table) + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(table, index.name, ["DESC", "ASC", "ASC"]) + self.assertEqual(len(constraints[index.name]["columns"]), 3) + self.assertEqual(constraints[index.name]["columns"][2], "pub_date") + # SQL contains database functions and columns. + self.assertIs(sql.references_column(table, "author_id"), True) + self.assertIs(sql.references_column(table, "title"), True) + self.assertIs(sql.references_column(table, "pub_date"), True) + self.assertIn("LOWER(%s)" % editor.quote_name("title"), str(sql)) + # Remove index. + with connection.schema_editor() as editor: + editor.remove_index(Book, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + @isolate_apps("schema") + def test_func_index_f_decimalfield(self): + class Node(Model): + value = DecimalField(max_digits=5, decimal_places=2) + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(Node) + index = Index(F("value"), name="func_f_decimalfield_idx") + # Add index. 
+ with connection.schema_editor() as editor: + editor.add_index(Node, index) + sql = index.create_sql(Node, editor) + table = Node._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + self.assertIs(sql.references_column(table, "value"), True) + # SQL doesn't contain casting. + self.assertNotIn("CAST", str(sql)) + # Remove index. + with connection.schema_editor() as editor: + editor.remove_index(Node, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_cast(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + index = Index(Cast("weight", FloatField()), name="func_cast_idx") + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Author, index) + sql = index.create_sql(Author, editor) + table = Author._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + self.assertIs(sql.references_column(table, "weight"), True) + # Remove index. + with connection.schema_editor() as editor: + editor.remove_index(Author, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_collate(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("This backend does not support case-insensitive collations.") + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(BookWithSlug) + index = Index( + Collate(F("title"), collation=collation).desc(), + Collate("slug", collation=collation), + name="func_collate_idx", + ) + # Add index. 
+ with connection.schema_editor() as editor: + editor.add_index(BookWithSlug, index) + sql = index.create_sql(BookWithSlug, editor) + table = Book._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(table, index.name, ["DESC", "ASC"]) + # SQL contains columns and a collation. + self.assertIs(sql.references_column(table, "title"), True) + self.assertIs(sql.references_column(table, "slug"), True) + self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql)) + # Remove index. + with connection.schema_editor() as editor: + editor.remove_index(Book, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + @skipIfDBFeature("collate_as_index_expression") + def test_func_index_collate_f_ordered(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("This backend does not support case-insensitive collations.") + with connection.schema_editor() as editor: + editor.create_model(Author) + index = Index( + Collate(F("name").desc(), collation=collation), + name="func_collate_f_desc_idx", + ) + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Author, index) + sql = index.create_sql(Author, editor) + table = Author._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + if connection.features.supports_index_column_ordering: + self.assertIndexOrder(table, index.name, ["DESC"]) + # SQL contains columns and a collation. + self.assertIs(sql.references_column(table, "name"), True) + self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql)) + # Remove index. 
+ with connection.schema_editor() as editor: + editor.remove_index(Author, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_calc(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + index = Index(F("height") / (F("weight") + Value(5)), name="func_calc_idx") + # Add index. + with connection.schema_editor() as editor: + editor.add_index(Author, index) + sql = index.create_sql(Author, editor) + table = Author._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + # SQL contains columns and expressions. + self.assertIs(sql.references_column(table, "height"), True) + self.assertIs(sql.references_column(table, "weight"), True) + sql = str(sql) + self.assertIs( + sql.index(editor.quote_name("height")) + < sql.index("/") + < sql.index(editor.quote_name("weight")) + < sql.index("+") + < sql.index("5"), + True, + ) + # Remove index. + with connection.schema_editor() as editor: + editor.remove_index(Author, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipUnlessDBFeature("supports_expression_indexes", "supports_json_field") + @isolate_apps("schema") + def test_func_index_json_key_transform(self): + class JSONModel(Model): + field = JSONField() + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(JSONModel) + self.isolated_local_models = [JSONModel] + index = Index("field__some_key", name="func_json_key_idx") + with connection.schema_editor() as editor: + editor.add_index(JSONModel, index) + sql = index.create_sql(JSONModel, editor) + table = JSONModel._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + self.assertIs(sql.references_column(table, "field"), True) + with connection.schema_editor() as editor: + editor.remove_index(JSONModel, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + 
@skipUnlessDBFeature("supports_expression_indexes", "supports_json_field") + @isolate_apps("schema") + def test_func_index_json_key_transform_cast(self): + class JSONModel(Model): + field = JSONField() + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(JSONModel) + self.isolated_local_models = [JSONModel] + index = Index( + Cast(KeyTextTransform("some_key", "field"), IntegerField()), + name="func_json_key_cast_idx", + ) + with connection.schema_editor() as editor: + editor.add_index(JSONModel, index) + sql = index.create_sql(JSONModel, editor) + table = JSONModel._meta.db_table + self.assertIn(index.name, self.get_constraints(table)) + self.assertIs(sql.references_column(table, "field"), True) + with connection.schema_editor() as editor: + editor.remove_index(JSONModel, index) + self.assertNotIn(index.name, self.get_constraints(table)) + + @skipIfDBFeature("supports_expression_indexes") + def test_func_index_unsupported(self): + # Index is ignored on databases that don't support indexes on + # expressions. + with connection.schema_editor() as editor: + editor.create_model(Author) + index = Index(F("name"), name="random_idx") + with connection.schema_editor() as editor, self.assertNumQueries(0): + self.assertIsNone(editor.add_index(Author, index)) + self.assertIsNone(editor.remove_index(Author, index)) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_nonexistent_field(self): + index = Index(Lower("nonexistent"), name="func_nonexistent_idx") + msg = ( + "Cannot resolve keyword 'nonexistent' into field. 
Choices are: " + "height, id, name, uuid, weight" + ) + with self.assertRaisesMessage(FieldError, msg): + with connection.schema_editor() as editor: + editor.add_index(Author, index) + + @skipUnlessDBFeature("supports_expression_indexes") + def test_func_index_nondeterministic(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + index = Index(Random(), name="func_random_idx") + with connection.schema_editor() as editor: + with self.assertRaises(DatabaseError): + editor.add_index(Author, index) + + def test_primary_key(self): + """ + Tests altering of the primary key + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Tag) + # Ensure the table is there and has the right PK + self.assertEqual(self.get_primary_key(Tag._meta.db_table), "id") + # Alter to change the PK + id_field = Tag._meta.get_field("id") + old_field = Tag._meta.get_field("slug") + new_field = SlugField(primary_key=True) + new_field.set_attributes_from_name("slug") + new_field.model = Tag + with connection.schema_editor() as editor: + editor.remove_field(Tag, id_field) + editor.alter_field(Tag, old_field, new_field) + # Ensure the PK changed + self.assertNotIn( + "id", + self.get_indexes(Tag._meta.db_table), + ) + self.assertEqual(self.get_primary_key(Tag._meta.db_table), "slug") + + def test_alter_primary_key_the_same_name(self): + with connection.schema_editor() as editor: + editor.create_model(Thing) + + old_field = Thing._meta.get_field("when") + new_field = CharField(max_length=2, primary_key=True) + new_field.set_attributes_from_name("when") + new_field.model = Thing + with connection.schema_editor() as editor: + editor.alter_field(Thing, old_field, new_field, strict=True) + self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when") + with connection.schema_editor() as editor: + editor.alter_field(Thing, new_field, old_field, strict=True) + self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when") + + 
def test_context_manager_exit(self): + """ + Ensures transaction is correctly closed when an error occurs + inside a SchemaEditor context. + """ + + class SomeError(Exception): + pass + + try: + with connection.schema_editor(): + raise SomeError + except SomeError: + self.assertFalse(connection.in_atomic_block) + + @skipIfDBFeature("can_rollback_ddl") + def test_unsupported_transactional_ddl_disallowed(self): + message = ( + "Executing DDL statements while in a transaction on databases " + "that can't perform a rollback is prohibited." + ) + with atomic(), connection.schema_editor() as editor: + with self.assertRaisesMessage(TransactionManagementError, message): + editor.execute( + editor.sql_create_table % {"table": "foo", "definition": ""} + ) + + @skipUnlessDBFeature("supports_foreign_keys", "indexes_foreign_keys") + def test_foreign_key_index_long_names_regression(self): + """ + Regression test for #21497. + Only affects databases that supports foreign keys. + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(AuthorWithEvenLongerName) + editor.create_model(BookWithLongName) + # Find the properly shortened column name + column_name = connection.ops.quote_name( + "author_foreign_key_with_really_long_field_name_id" + ) + column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase + # Ensure the table is there and has an index on the column + self.assertIn( + column_name, + self.get_indexes(BookWithLongName._meta.db_table), + ) + + @skipUnlessDBFeature("supports_foreign_keys") + def test_add_foreign_key_long_names(self): + """ + Regression test for #23009. + Only affects databases that supports foreign keys. 
+ """ + # Create the initial tables + with connection.schema_editor() as editor: + editor.create_model(AuthorWithEvenLongerName) + editor.create_model(BookWithLongName) + # Add a second FK, this would fail due to long ref name before the fix + new_field = ForeignKey( + AuthorWithEvenLongerName, CASCADE, related_name="something" + ) + new_field.set_attributes_from_name( + "author_other_really_long_named_i_mean_so_long_fk" + ) + with connection.schema_editor() as editor: + editor.add_field(BookWithLongName, new_field) + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_foreign_keys") + def test_add_foreign_key_quoted_db_table(self): + class Author(Model): + class Meta: + db_table = '"table_author_double_quoted"' + app_label = "schema" + + class Book(Model): + author = ForeignKey(Author, CASCADE) + + class Meta: + app_label = "schema" + + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + self.isolated_local_models = [Author] + if connection.vendor == "mysql": + self.assertForeignKeyExists( + Book, "author_id", '"table_author_double_quoted"' + ) + else: + self.assertForeignKeyExists(Book, "author_id", "table_author_double_quoted") + + def test_add_foreign_object(self): + with connection.schema_editor() as editor: + editor.create_model(BookForeignObj) + self.local_models = [BookForeignObj] + + new_field = ForeignObject( + Author, on_delete=CASCADE, from_fields=["author_id"], to_fields=["id"] + ) + new_field.set_attributes_from_name("author") + with connection.schema_editor() as editor: + editor.add_field(BookForeignObj, new_field) + + def test_creation_deletion_reserved_names(self): + """ + Tries creating a model's table, and then deleting it when it has a + SQL reserved name. 
+ """ + # Create the table + with connection.schema_editor() as editor: + try: + editor.create_model(Thing) + except OperationalError as e: + self.fail( + "Errors when applying initial migration for a model " + "with a table named after an SQL reserved word: %s" % e + ) + # The table is there + list(Thing.objects.all()) + # Clean up that table + with connection.schema_editor() as editor: + editor.delete_model(Thing) + # The table is gone + with self.assertRaises(DatabaseError): + list(Thing.objects.all()) + + def test_remove_constraints_capital_letters(self): + """ + #23065 - Constraint names must be quoted if they contain capital letters. + """ + + def get_field(*args, field_class=IntegerField, **kwargs): + kwargs["db_column"] = "CamelCase" + field = field_class(*args, **kwargs) + field.set_attributes_from_name("CamelCase") + return field + + model = Author + field = get_field() + table = model._meta.db_table + column = field.column + identifier_converter = connection.introspection.identifier_converter + + with connection.schema_editor() as editor: + editor.create_model(model) + editor.add_field(model, field) + + constraint_name = "CamelCaseIndex" + expected_constraint_name = identifier_converter(constraint_name) + editor.execute( + editor.sql_create_index + % { + "table": editor.quote_name(table), + "name": editor.quote_name(constraint_name), + "using": "", + "columns": editor.quote_name(column), + "extra": "", + "condition": "", + "include": "", + } + ) + self.assertIn( + expected_constraint_name, self.get_constraints(model._meta.db_table) + ) + editor.alter_field(model, get_field(db_index=True), field, strict=True) + self.assertNotIn( + expected_constraint_name, self.get_constraints(model._meta.db_table) + ) + + constraint_name = "CamelCaseUniqConstraint" + expected_constraint_name = identifier_converter(constraint_name) + editor.execute(editor._create_unique_sql(model, [field], constraint_name)) + self.assertIn( + expected_constraint_name, 
self.get_constraints(model._meta.db_table) + ) + editor.alter_field(model, get_field(unique=True), field, strict=True) + self.assertNotIn( + expected_constraint_name, self.get_constraints(model._meta.db_table) + ) + + if editor.sql_create_fk and connection.features.can_introspect_foreign_keys: + constraint_name = "CamelCaseFKConstraint" + expected_constraint_name = identifier_converter(constraint_name) + editor.execute( + editor.sql_create_fk + % { + "table": editor.quote_name(table), + "name": editor.quote_name(constraint_name), + "column": editor.quote_name(column), + "to_table": editor.quote_name(table), + "to_column": editor.quote_name(model._meta.auto_field.column), + "deferrable": connection.ops.deferrable_sql(), + } + ) + self.assertIn( + expected_constraint_name, self.get_constraints(model._meta.db_table) + ) + editor.alter_field( + model, + get_field(Author, CASCADE, field_class=ForeignKey), + field, + strict=True, + ) + self.assertNotIn( + expected_constraint_name, self.get_constraints(model._meta.db_table) + ) + + def test_add_field_use_effective_default(self): + """ + #23987 - effective_default() should be used as the field default when + adding a new field. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure there's no surname field + columns = self.column_classes(Author) + self.assertNotIn("surname", columns) + # Create a row + Author.objects.create(name="Anonymous1") + # Add new CharField to ensure default will be used from effective_default + new_field = CharField(max_length=15, blank=True) + new_field.set_attributes_from_name("surname") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + # Ensure field was added with the right default + with connection.cursor() as cursor: + cursor.execute("SELECT surname FROM schema_author;") + item = cursor.fetchall()[0] + self.assertEqual( + item[0], + None if connection.features.interprets_empty_strings_as_nulls else "", + ) + + def test_add_field_default_dropped(self): + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure there's no surname field + columns = self.column_classes(Author) + self.assertNotIn("surname", columns) + # Create a row + Author.objects.create(name="Anonymous1") + # Add new CharField with a default + new_field = CharField(max_length=15, blank=True, default="surname default") + new_field.set_attributes_from_name("surname") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + # Ensure field was added with the right default + with connection.cursor() as cursor: + cursor.execute("SELECT surname FROM schema_author;") + item = cursor.fetchall()[0] + self.assertEqual(item[0], "surname default") + # And that the default is no longer set in the database. 
+ field = next( + f + for f in connection.introspection.get_table_description( + cursor, "schema_author" + ) + if f.name == "surname" + ) + if connection.features.can_introspect_default: + self.assertIsNone(field.default) + + def test_add_field_default_nullable(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + # Add new nullable CharField with a default. + new_field = CharField(max_length=15, blank=True, null=True, default="surname") + new_field.set_attributes_from_name("surname") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + Author.objects.create(name="Anonymous1") + with connection.cursor() as cursor: + cursor.execute("SELECT surname FROM schema_author;") + item = cursor.fetchall()[0] + self.assertIsNone(item[0]) + field = next( + f + for f in connection.introspection.get_table_description( + cursor, + "schema_author", + ) + if f.name == "surname" + ) + # Field is still nullable. + self.assertTrue(field.null_ok) + # The database default is no longer set. + if connection.features.can_introspect_default: + self.assertIn(field.default, ["NULL", None]) + + def test_add_textfield_default_nullable(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + # Add new nullable TextField with a default. + new_field = TextField(blank=True, null=True, default="text") + new_field.set_attributes_from_name("description") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + Author.objects.create(name="Anonymous1") + with connection.cursor() as cursor: + cursor.execute("SELECT description FROM schema_author;") + item = cursor.fetchall()[0] + self.assertIsNone(item[0]) + field = next( + f + for f in connection.introspection.get_table_description( + cursor, + "schema_author", + ) + if f.name == "description" + ) + # Field is still nullable. + self.assertTrue(field.null_ok) + # The database default is no longer set. 
+ if connection.features.can_introspect_default: + self.assertIn(field.default, ["NULL", None]) + + def test_alter_field_default_dropped(self): + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Create a row + Author.objects.create(name="Anonymous1") + self.assertIsNone(Author.objects.get().height) + old_field = Author._meta.get_field("height") + # The default from the new field is used in updating existing rows. + new_field = IntegerField(blank=True, default=42) + new_field.set_attributes_from_name("height") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual(Author.objects.get().height, 42) + # The database default should be removed. + with connection.cursor() as cursor: + field = next( + f + for f in connection.introspection.get_table_description( + cursor, "schema_author" + ) + if f.name == "height" + ) + if connection.features.can_introspect_default: + self.assertIsNone(field.default) + + def test_alter_field_default_doesnt_perform_queries(self): + """ + No queries are performed if a field default changes and the field's + not changing from null to non-null. + """ + with connection.schema_editor() as editor: + editor.create_model(AuthorWithDefaultHeight) + old_field = AuthorWithDefaultHeight._meta.get_field("height") + new_default = old_field.default * 2 + new_field = PositiveIntegerField(null=True, blank=True, default=new_default) + new_field.set_attributes_from_name("height") + with connection.schema_editor() as editor, self.assertNumQueries(0): + editor.alter_field( + AuthorWithDefaultHeight, old_field, new_field, strict=True + ) + + @skipUnlessDBFeature("supports_foreign_keys") + def test_alter_field_fk_attributes_noop(self): + """ + No queries are performed when changing field attributes that don't + affect the schema. 
+ """ + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + old_field = Book._meta.get_field("author") + new_field = ForeignKey( + Author, + blank=True, + editable=False, + error_messages={"invalid": "error message"}, + help_text="help text", + limit_choices_to={"limit": "choice"}, + on_delete=PROTECT, + related_name="related_name", + related_query_name="related_query_name", + validators=[lambda x: x], + verbose_name="verbose name", + ) + new_field.set_attributes_from_name("author") + with connection.schema_editor() as editor, self.assertNumQueries(0): + editor.alter_field(Book, old_field, new_field, strict=True) + with connection.schema_editor() as editor, self.assertNumQueries(0): + editor.alter_field(Book, new_field, old_field, strict=True) + + def test_alter_field_choices_noop(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + old_field = Author._meta.get_field("name") + new_field = CharField( + choices=(("Jane", "Jane"), ("Joe", "Joe")), + max_length=255, + ) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor, self.assertNumQueries(0): + editor.alter_field(Author, old_field, new_field, strict=True) + with connection.schema_editor() as editor, self.assertNumQueries(0): + editor.alter_field(Author, new_field, old_field, strict=True) + + def test_add_textfield_unhashable_default(self): + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Create a row + Author.objects.create(name="Anonymous1") + # Create a field that has an unhashable default + new_field = TextField(default={}) + new_field.set_attributes_from_name("info") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific") + def test_add_indexed_charfield(self): + field = CharField(max_length=255, db_index=True) + 
field.set_attributes_from_name("nom_de_plume") + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.add_field(Author, field) + # Should create two indexes; one for like operator. + self.assertEqual( + self.get_constraints_for_column(Author, "nom_de_plume"), + [ + "schema_author_nom_de_plume_7570a851", + "schema_author_nom_de_plume_7570a851_like", + ], + ) + + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific") + def test_add_unique_charfield(self): + field = CharField(max_length=255, unique=True) + field.set_attributes_from_name("nom_de_plume") + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.add_field(Author, field) + # Should create two indexes; one for like operator. + self.assertEqual( + self.get_constraints_for_column(Author, "nom_de_plume"), + [ + "schema_author_nom_de_plume_7570a851_like", + "schema_author_nom_de_plume_key", + ], + ) + + @skipUnlessDBFeature("supports_comments") + def test_add_db_comment_charfield(self): + comment = "Custom comment" + field = CharField(max_length=255, db_comment=comment) + field.set_attributes_from_name("name_with_comment") + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.add_field(Author, field) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name_with_comment"), + comment, + ) + + @skipUnlessDBFeature("supports_comments") + def test_add_db_comment_and_default_charfield(self): + comment = "Custom comment with default" + field = CharField(max_length=255, default="Joe Doe", db_comment=comment) + field.set_attributes_from_name("name_with_comment_default") + with connection.schema_editor() as editor: + editor.create_model(Author) + Author.objects.create(name="Before adding a new field") + editor.add_field(Author, field) + + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name_with_comment_default"), + comment, + ) + with connection.cursor() as cursor: + 
cursor.execute( + f"SELECT name_with_comment_default FROM {Author._meta.db_table};" + ) + for row in cursor.fetchall(): + self.assertEqual(row[0], "Joe Doe") + + @skipUnlessDBFeature("supports_comments") + def test_alter_db_comment(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + # Add comment. + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_comment="Custom comment") + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + "Custom comment", + ) + # Alter comment. + old_field = new_field + new_field = CharField(max_length=255, db_comment="New custom comment") + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + "New custom comment", + ) + # Remove comment. 
+ old_field = new_field + new_field = CharField(max_length=255) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertIn( + self.get_column_comment(Author._meta.db_table, "name"), + [None, ""], + ) + + @skipUnlessDBFeature("supports_comments", "supports_foreign_keys") + def test_alter_db_comment_foreign_key(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + + comment = "FK custom comment" + old_field = Book._meta.get_field("author") + new_field = ForeignKey(Author, CASCADE, db_comment=comment) + new_field.set_attributes_from_name("author") + with connection.schema_editor() as editor: + editor.alter_field(Book, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Book._meta.db_table, "author_id"), + comment, + ) + + @skipUnlessDBFeature("supports_comments") + def test_alter_field_type_preserve_comment(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + + comment = "This is the name." + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_comment=comment) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + comment, + ) + # Changing a field type should preserve the comment. + old_field = new_field + new_field = CharField(max_length=511, db_comment=comment) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, old_field, strict=True) + # Comment is preserved. 
+ self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + comment, + ) + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_comments") + def test_db_comment_table(self): + class ModelWithDbTableComment(Model): + class Meta: + app_label = "schema" + db_table_comment = "Custom table comment" + + with connection.schema_editor() as editor: + editor.create_model(ModelWithDbTableComment) + self.isolated_local_models = [ModelWithDbTableComment] + self.assertEqual( + self.get_table_comment(ModelWithDbTableComment._meta.db_table), + "Custom table comment", + ) + # Alter table comment. + old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment + with connection.schema_editor() as editor: + editor.alter_db_table_comment( + ModelWithDbTableComment, old_db_table_comment, "New table comment" + ) + self.assertEqual( + self.get_table_comment(ModelWithDbTableComment._meta.db_table), + "New table comment", + ) + # Remove table comment. + old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment + with connection.schema_editor() as editor: + editor.alter_db_table_comment( + ModelWithDbTableComment, old_db_table_comment, None + ) + self.assertIn( + self.get_table_comment(ModelWithDbTableComment._meta.db_table), + [None, ""], + ) + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_comments", "supports_foreign_keys") + def test_db_comments_from_abstract_model(self): + class AbstractModelWithDbComments(Model): + name = CharField( + max_length=255, db_comment="Custom comment", null=True, blank=True + ) + + class Meta: + app_label = "schema" + abstract = True + db_table_comment = "Custom table comment" + + class ModelWithDbComments(AbstractModelWithDbComments): + pass + + with connection.schema_editor() as editor: + editor.create_model(ModelWithDbComments) + self.isolated_local_models = [ModelWithDbComments] + + self.assertEqual( + self.get_column_comment(ModelWithDbComments._meta.db_table, "name"), + "Custom comment", + ) + 
self.assertEqual( + self.get_table_comment(ModelWithDbComments._meta.db_table), + "Custom table comment", + ) + + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific") + def test_alter_field_add_index_to_charfield(self): + # Create the table and verify no initial indexes. + with connection.schema_editor() as editor: + editor.create_model(Author) + self.assertEqual(self.get_constraints_for_column(Author, "name"), []) + # Alter to add db_index=True and create 2 indexes. + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_index=True) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_constraints_for_column(Author, "name"), + ["schema_author_name_1fbc5617", "schema_author_name_1fbc5617_like"], + ) + # Remove db_index=True to drop both indexes. + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, old_field, strict=True) + self.assertEqual(self.get_constraints_for_column(Author, "name"), []) + + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific") + def test_alter_field_add_unique_to_charfield(self): + # Create the table and verify no initial indexes. + with connection.schema_editor() as editor: + editor.create_model(Author) + self.assertEqual(self.get_constraints_for_column(Author, "name"), []) + # Alter to add unique=True and create 2 indexes. + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, unique=True) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_constraints_for_column(Author, "name"), + ["schema_author_name_1fbc5617_like", "schema_author_name_1fbc5617_uniq"], + ) + # Remove unique=True to drop both indexes. 
        # Reverting to the original field definition must drop both of the
        # indexes asserted above.
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, old_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Author, "name"), [])

    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_field_add_index_to_textfield(self):
        """
        On PostgreSQL, altering a TextField to db_index=True creates two
        indexes (the plain index plus the companion "*_like" index), and
        reverting the alteration drops both again.
        """
        # Create the table and verify no initial indexes.
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        self.assertEqual(self.get_constraints_for_column(Note, "info"), [])
        # Alter to add db_index=True and create 2 indexes.
        old_field = Note._meta.get_field("info")
        new_field = TextField(db_index=True)
        new_field.set_attributes_from_name("info")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        self.assertEqual(
            self.get_constraints_for_column(Note, "info"),
            ["schema_note_info_4b0ea695", "schema_note_info_4b0ea695_like"],
        )
        # Remove db_index=True to drop both indexes.
        with connection.schema_editor() as editor:
            editor.alter_field(Note, new_field, old_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Note, "info"), [])

    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_field_add_unique_to_charfield_with_db_index(self):
        # Create the table and verify initial indexes.
        with connection.schema_editor() as editor:
            editor.create_model(BookWithoutAuthor)
        self.assertEqual(
            self.get_constraints_for_column(BookWithoutAuthor, "title"),
            ["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
        )
        # Alter to add unique=True (should replace the index)
        old_field = BookWithoutAuthor._meta.get_field("title")
        new_field = CharField(max_length=100, db_index=True, unique=True)
        new_field.set_attributes_from_name("title")
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
        # The plain index is replaced by the unique constraint; the "*_like"
        # index remains.
        self.assertEqual(
            self.get_constraints_for_column(BookWithoutAuthor, "title"),
            ["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
        )
        # Alter to remove unique=True (should drop unique index)
        new_field2 = CharField(max_length=100, db_index=True)
        new_field2.set_attributes_from_name("title")
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
        # Back to the original index pair.
        self.assertEqual(
            self.get_constraints_for_column(BookWithoutAuthor, "title"),
            ["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
        )

    @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
    def test_alter_field_remove_unique_and_db_index_from_charfield(self):
        # Create the table and verify initial indexes.
+ with connection.schema_editor() as editor: + editor.create_model(BookWithoutAuthor) + self.assertEqual( + self.get_constraints_for_column(BookWithoutAuthor, "title"), + ["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"], + ) + # Alter to add unique=True (should replace the index) + old_field = BookWithoutAuthor._meta.get_field("title") + new_field = CharField(max_length=100, db_index=True, unique=True) + new_field.set_attributes_from_name("title") + with connection.schema_editor() as editor: + editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) + self.assertEqual( + self.get_constraints_for_column(BookWithoutAuthor, "title"), + ["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"], + ) + # Alter to remove both unique=True and db_index=True (should drop all indexes) + new_field2 = CharField(max_length=100) + new_field2.set_attributes_from_name("title") + with connection.schema_editor() as editor: + editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) + self.assertEqual( + self.get_constraints_for_column(BookWithoutAuthor, "title"), [] + ) + + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific") + def test_alter_field_swap_unique_and_db_index_with_charfield(self): + # Create the table and verify initial indexes. 
+ with connection.schema_editor() as editor: + editor.create_model(BookWithoutAuthor) + self.assertEqual( + self.get_constraints_for_column(BookWithoutAuthor, "title"), + ["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"], + ) + # Alter to set unique=True and remove db_index=True (should replace the index) + old_field = BookWithoutAuthor._meta.get_field("title") + new_field = CharField(max_length=100, unique=True) + new_field.set_attributes_from_name("title") + with connection.schema_editor() as editor: + editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) + self.assertEqual( + self.get_constraints_for_column(BookWithoutAuthor, "title"), + ["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"], + ) + # Alter to set db_index=True and remove unique=True (should restore index) + new_field2 = CharField(max_length=100, db_index=True) + new_field2.set_attributes_from_name("title") + with connection.schema_editor() as editor: + editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) + self.assertEqual( + self.get_constraints_for_column(BookWithoutAuthor, "title"), + ["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"], + ) + + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific") + def test_alter_field_add_db_index_to_charfield_with_unique(self): + # Create the table and verify initial indexes. 
+ with connection.schema_editor() as editor: + editor.create_model(Tag) + self.assertEqual( + self.get_constraints_for_column(Tag, "slug"), + ["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"], + ) + # Alter to add db_index=True + old_field = Tag._meta.get_field("slug") + new_field = SlugField(db_index=True, unique=True) + new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field(Tag, old_field, new_field, strict=True) + self.assertEqual( + self.get_constraints_for_column(Tag, "slug"), + ["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"], + ) + # Alter to remove db_index=True + new_field2 = SlugField(unique=True) + new_field2.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field(Tag, new_field, new_field2, strict=True) + self.assertEqual( + self.get_constraints_for_column(Tag, "slug"), + ["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"], + ) + + def test_alter_field_add_index_to_integerfield(self): + # Create the table and verify no initial indexes. + with connection.schema_editor() as editor: + editor.create_model(Author) + self.assertEqual(self.get_constraints_for_column(Author, "weight"), []) + + # Alter to add db_index=True and create index. + old_field = Author._meta.get_field("weight") + new_field = IntegerField(null=True, db_index=True) + new_field.set_attributes_from_name("weight") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_constraints_for_column(Author, "weight"), + ["schema_author_weight_587740f9"], + ) + + # Remove db_index=True to drop index. 
        # Drop the index again by reverting to the original field definition.
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, old_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Author, "weight"), [])

    def test_alter_pk_with_self_referential_field(self):
        """
        Changing the primary key field name of a model with a self-referential
        foreign key (#26384).
        """
        with connection.schema_editor() as editor:
            editor.create_model(Node)
        old_field = Node._meta.get_field("node_id")
        new_field = AutoField(primary_key=True)
        new_field.set_attributes_from_name("id")
        with connection.schema_editor() as editor:
            editor.alter_field(Node, old_field, new_field, strict=True)
        # The self-referential FK column must still reference the model's own
        # table after the primary key rename.
        self.assertForeignKeyExists(Node, "parent_id", Node._meta.db_table)

    # NOTE(review): stacked @mock.patch decorators inject mocks bottom-up, so
    # the first injected argument (named `mocked_datetime` here) actually
    # receives the patched `timezone` module and the second (`mocked_tz`) the
    # patched `datetime` module — the parameter names appear swapped relative
    # to the mocks they receive. Confirm the intended pairing of the naive
    # `now` vs aware `now_tz` return values assigned in the method body.
    @mock.patch("django.db.backends.base.schema.datetime")
    @mock.patch("django.db.backends.base.schema.timezone")
    def test_add_datefield_and_datetimefield_use_effective_default(
        self, mocked_datetime, mocked_tz
    ):
        """
        effective_default() should be used for DateField, DateTimeField, and
        TimeField if auto_now or auto_now_add is set (#25005).
+ """ + now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1) + now_tz = datetime.datetime( + month=1, day=1, year=2000, hour=1, minute=1, tzinfo=datetime.timezone.utc + ) + mocked_datetime.now = mock.MagicMock(return_value=now) + mocked_tz.now = mock.MagicMock(return_value=now_tz) + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Check auto_now/auto_now_add attributes are not defined + columns = self.column_classes(Author) + self.assertNotIn("dob_auto_now", columns) + self.assertNotIn("dob_auto_now_add", columns) + self.assertNotIn("dtob_auto_now", columns) + self.assertNotIn("dtob_auto_now_add", columns) + self.assertNotIn("tob_auto_now", columns) + self.assertNotIn("tob_auto_now_add", columns) + # Create a row + Author.objects.create(name="Anonymous1") + # Ensure fields were added with the correct defaults + dob_auto_now = DateField(auto_now=True) + dob_auto_now.set_attributes_from_name("dob_auto_now") + self.check_added_field_default( + editor, + Author, + dob_auto_now, + "dob_auto_now", + now.date(), + cast_function=lambda x: x.date(), + ) + dob_auto_now_add = DateField(auto_now_add=True) + dob_auto_now_add.set_attributes_from_name("dob_auto_now_add") + self.check_added_field_default( + editor, + Author, + dob_auto_now_add, + "dob_auto_now_add", + now.date(), + cast_function=lambda x: x.date(), + ) + dtob_auto_now = DateTimeField(auto_now=True) + dtob_auto_now.set_attributes_from_name("dtob_auto_now") + self.check_added_field_default( + editor, + Author, + dtob_auto_now, + "dtob_auto_now", + now, + ) + dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True) + dt_tm_of_birth_auto_now_add.set_attributes_from_name("dtob_auto_now_add") + self.check_added_field_default( + editor, + Author, + dt_tm_of_birth_auto_now_add, + "dtob_auto_now_add", + now, + ) + tob_auto_now = TimeField(auto_now=True) + tob_auto_now.set_attributes_from_name("tob_auto_now") + self.check_added_field_default( + editor, + 
Author, + tob_auto_now, + "tob_auto_now", + now.time(), + cast_function=lambda x: x.time(), + ) + tob_auto_now_add = TimeField(auto_now_add=True) + tob_auto_now_add.set_attributes_from_name("tob_auto_now_add") + self.check_added_field_default( + editor, + Author, + tob_auto_now_add, + "tob_auto_now_add", + now.time(), + cast_function=lambda x: x.time(), + ) + + def test_namespaced_db_table_create_index_name(self): + """ + Table names are stripped of their namespace/schema before being used to + generate index names. + """ + with connection.schema_editor() as editor: + max_name_length = connection.ops.max_name_length() or 200 + namespace = "n" * max_name_length + table_name = "t" * max_name_length + namespaced_table_name = '"%s"."%s"' % (namespace, table_name) + self.assertEqual( + editor._create_index_name(table_name, []), + editor._create_index_name(namespaced_table_name, []), + ) + + @unittest.skipUnless( + connection.vendor == "oracle", "Oracle specific db_table syntax" + ) + def test_creation_with_db_table_double_quotes(self): + oracle_user = connection.creation._test_database_user() + + class Student(Model): + name = CharField(max_length=30) + + class Meta: + app_label = "schema" + apps = new_apps + db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user + + class Document(Model): + name = CharField(max_length=30) + students = ManyToManyField(Student) + + class Meta: + app_label = "schema" + apps = new_apps + db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user + + self.isolated_local_models = [Student, Document] + + with connection.schema_editor() as editor: + editor.create_model(Student) + editor.create_model(Document) + + doc = Document.objects.create(name="Test Name") + student = Student.objects.create(name="Some man") + doc.students.add(student) + + @isolate_apps("schema") + @unittest.skipUnless( + connection.vendor == "postgresql", "PostgreSQL specific db_table syntax." 
+ ) + def test_namespaced_db_table_foreign_key_reference(self): + with connection.cursor() as cursor: + cursor.execute("CREATE SCHEMA django_schema_tests") + + def delete_schema(): + with connection.cursor() as cursor: + cursor.execute("DROP SCHEMA django_schema_tests CASCADE") + + self.addCleanup(delete_schema) + + class Author(Model): + class Meta: + app_label = "schema" + + class Book(Model): + class Meta: + app_label = "schema" + db_table = '"django_schema_tests"."schema_book"' + + author = ForeignKey(Author, CASCADE) + author.set_attributes_from_name("author") + + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + editor.add_field(Book, author) + + def test_rename_table_renames_deferred_sql_references(self): + atomic_rename = connection.features.supports_atomic_references_rename + with connection.schema_editor(atomic=atomic_rename) as editor: + editor.create_model(Author) + editor.create_model(Book) + editor.alter_db_table(Author, "schema_author", "schema_renamed_author") + editor.alter_db_table(Author, "schema_book", "schema_renamed_book") + try: + self.assertGreater(len(editor.deferred_sql), 0) + for statement in editor.deferred_sql: + self.assertIs(statement.references_table("schema_author"), False) + self.assertIs(statement.references_table("schema_book"), False) + finally: + editor.alter_db_table(Author, "schema_renamed_author", "schema_author") + editor.alter_db_table(Author, "schema_renamed_book", "schema_book") + + def test_rename_column_renames_deferred_sql_references(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + old_title = Book._meta.get_field("title") + new_title = CharField(max_length=100, db_index=True) + new_title.set_attributes_from_name("renamed_title") + editor.alter_field(Book, old_title, new_title) + old_author = Book._meta.get_field("author") + new_author = ForeignKey(Author, CASCADE) + 
new_author.set_attributes_from_name("renamed_author") + editor.alter_field(Book, old_author, new_author) + self.assertGreater(len(editor.deferred_sql), 0) + for statement in editor.deferred_sql: + self.assertIs(statement.references_column("book", "title"), False) + self.assertIs(statement.references_column("book", "author_id"), False) + + @isolate_apps("schema") + def test_referenced_field_without_constraint_rename_inside_atomic_block(self): + """ + Foreign keys without database level constraint don't prevent the field + they reference from being renamed in an atomic block. + """ + + class Foo(Model): + field = CharField(max_length=255, unique=True) + + class Meta: + app_label = "schema" + + class Bar(Model): + foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False) + + class Meta: + app_label = "schema" + + self.isolated_local_models = [Foo, Bar] + with connection.schema_editor() as editor: + editor.create_model(Foo) + editor.create_model(Bar) + + new_field = CharField(max_length=255, unique=True) + new_field.set_attributes_from_name("renamed") + with connection.schema_editor(atomic=True) as editor: + editor.alter_field(Foo, Foo._meta.get_field("field"), new_field) + + @isolate_apps("schema") + def test_referenced_table_without_constraint_rename_inside_atomic_block(self): + """ + Foreign keys without database level constraint don't prevent the table + they reference from being renamed in an atomic block. 
+ """ + + class Foo(Model): + field = CharField(max_length=255, unique=True) + + class Meta: + app_label = "schema" + + class Bar(Model): + foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False) + + class Meta: + app_label = "schema" + + self.isolated_local_models = [Foo, Bar] + with connection.schema_editor() as editor: + editor.create_model(Foo) + editor.create_model(Bar) + + new_field = CharField(max_length=255, unique=True) + new_field.set_attributes_from_name("renamed") + with connection.schema_editor(atomic=True) as editor: + editor.alter_db_table(Foo, Foo._meta.db_table, "renamed_table") + Foo._meta.db_table = "renamed_table" + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_collation_on_charfield") + def test_db_collation_charfield(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + class Foo(Model): + field = CharField(max_length=255, db_collation=collation) + + class Meta: + app_label = "schema" + + self.isolated_local_models = [Foo] + with connection.schema_editor() as editor: + editor.create_model(Foo) + + self.assertEqual( + self.get_column_collation(Foo._meta.db_table, "field"), + collation, + ) + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_collation_on_textfield") + def test_db_collation_textfield(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + class Foo(Model): + field = TextField(db_collation=collation) + + class Meta: + app_label = "schema" + + self.isolated_local_models = [Foo] + with connection.schema_editor() as editor: + editor.create_model(Foo) + + self.assertEqual( + self.get_column_collation(Foo._meta.db_table, "field"), + collation, + ) + + @skipUnlessDBFeature("supports_collation_on_charfield") + def test_add_field_db_collation(self): + collation = 
connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + with connection.schema_editor() as editor: + editor.create_model(Author) + + new_field = CharField(max_length=255, db_collation=collation) + new_field.set_attributes_from_name("alias") + with connection.schema_editor() as editor: + editor.add_field(Author, new_field) + columns = self.column_classes(Author) + self.assertEqual( + columns["alias"][0], + connection.features.introspected_field_types["CharField"], + ) + self.assertEqual(columns["alias"][1][8], collation) + + @skipUnlessDBFeature("supports_collation_on_charfield") + def test_alter_field_db_collation(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + with connection.schema_editor() as editor: + editor.create_model(Author) + + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_collation=collation) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_collation(Author._meta.db_table, "name"), + collation, + ) + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, old_field, strict=True) + self.assertIsNone(self.get_column_collation(Author._meta.db_table, "name")) + + @skipUnlessDBFeature("supports_collation_on_charfield") + def test_alter_field_type_preserve_db_collation(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + with connection.schema_editor() as editor: + editor.create_model(Author) + + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_collation=collation) + 
new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_collation(Author._meta.db_table, "name"), + collation, + ) + # Changing a field type should preserve the collation. + old_field = new_field + new_field = CharField(max_length=511, db_collation=collation) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, old_field, strict=True) + # Collation is preserved. + self.assertEqual( + self.get_column_collation(Author._meta.db_table, "name"), + collation, + ) + + @skipUnlessDBFeature("supports_collation_on_charfield") + def test_alter_primary_key_db_collation(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + with connection.schema_editor() as editor: + editor.create_model(Thing) + + old_field = Thing._meta.get_field("when") + new_field = CharField(max_length=1, db_collation=collation, primary_key=True) + new_field.set_attributes_from_name("when") + new_field.model = Thing + with connection.schema_editor() as editor: + editor.alter_field(Thing, old_field, new_field, strict=True) + self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when") + self.assertEqual( + self.get_column_collation(Thing._meta.db_table, "when"), + collation, + ) + with connection.schema_editor() as editor: + editor.alter_field(Thing, new_field, old_field, strict=True) + self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when") + self.assertIsNone(self.get_column_collation(Thing._meta.db_table, "when")) + + @skipUnlessDBFeature( + "supports_collation_on_charfield", "supports_collation_on_textfield" + ) + def test_alter_field_type_and_db_collation(self): + collation = 
connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + with connection.schema_editor() as editor: + editor.create_model(Note) + + old_field = Note._meta.get_field("info") + new_field = CharField(max_length=255, db_collation=collation) + new_field.set_attributes_from_name("info") + new_field.model = Note + with connection.schema_editor() as editor: + editor.alter_field(Note, old_field, new_field, strict=True) + columns = self.column_classes(Note) + self.assertEqual( + columns["info"][0], + connection.features.introspected_field_types["CharField"], + ) + self.assertEqual(columns["info"][1][8], collation) + with connection.schema_editor() as editor: + editor.alter_field(Note, new_field, old_field, strict=True) + columns = self.column_classes(Note) + self.assertEqual(columns["info"][0], "TextField") + self.assertIsNone(columns["info"][1][8]) + + @skipUnlessDBFeature( + "supports_collation_on_charfield", + "supports_non_deterministic_collations", + ) + def test_ci_cs_db_collation(self): + cs_collation = connection.features.test_collations.get("cs") + ci_collation = connection.features.test_collations.get("ci") + try: + if connection.vendor == "mysql": + cs_collation = "latin1_general_cs" + elif connection.vendor == "postgresql": + cs_collation = "en-x-icu" + with connection.cursor() as cursor: + cursor.execute( + "CREATE COLLATION IF NOT EXISTS case_insensitive " + "(provider = icu, locale = 'und-u-ks-level2', " + "deterministic = false)" + ) + ci_collation = "case_insensitive" + # Create the table. + with connection.schema_editor() as editor: + editor.create_model(Author) + # Case-insensitive collation. 
+ old_field = Author._meta.get_field("name") + new_field_ci = CharField(max_length=255, db_collation=ci_collation) + new_field_ci.set_attributes_from_name("name") + new_field_ci.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field_ci, strict=True) + Author.objects.create(name="ANDREW") + self.assertIs(Author.objects.filter(name="Andrew").exists(), True) + # Case-sensitive collation. + new_field_cs = CharField(max_length=255, db_collation=cs_collation) + new_field_cs.set_attributes_from_name("name") + new_field_cs.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field_ci, new_field_cs, strict=True) + self.assertIs(Author.objects.filter(name="Andrew").exists(), False) + finally: + if connection.vendor == "postgresql": + with connection.cursor() as cursor: + cursor.execute("DROP COLLATION IF EXISTS case_insensitive") diff --git a/testbed/django__django/tests/select_for_update/__init__.py b/testbed/django__django/tests/select_for_update/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/select_for_update/models.py b/testbed/django__django/tests/select_for_update/models.py new file mode 100644 index 0000000000000000000000000000000000000000..c1b42f026ddc7389a625158768dde2e23be9efee --- /dev/null +++ b/testbed/django__django/tests/select_for_update/models.py @@ -0,0 +1,47 @@ +from django.db import models + + +class Entity(models.Model): + pass + + +class Country(Entity): + name = models.CharField(max_length=30) + + +class EUCountry(Country): + join_date = models.DateField() + + +class City(models.Model): + name = models.CharField(max_length=30) + country = models.ForeignKey(Country, models.CASCADE) + + +class EUCity(models.Model): + name = models.CharField(max_length=30) + country = models.ForeignKey(EUCountry, models.CASCADE) + + +class CountryProxy(Country): + 
class Meta: + proxy = True + + +class CountryProxyProxy(CountryProxy): + class Meta: + proxy = True + + +class CityCountryProxy(models.Model): + country = models.ForeignKey(CountryProxyProxy, models.CASCADE) + + +class Person(models.Model): + name = models.CharField(max_length=30) + born = models.ForeignKey(City, models.CASCADE, related_name="+") + died = models.ForeignKey(City, models.CASCADE, related_name="+") + + +class PersonProfile(models.Model): + person = models.OneToOneField(Person, models.CASCADE, related_name="profile") diff --git a/testbed/django__django/tests/select_for_update/tests.py b/testbed/django__django/tests/select_for_update/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..e8ba8f8b6e4419347f4cffed249a72e8ce8492a1 --- /dev/null +++ b/testbed/django__django/tests/select_for_update/tests.py @@ -0,0 +1,665 @@ +import threading +import time +from unittest import mock + +from multiple_database.routers import TestRouter + +from django.core.exceptions import FieldError +from django.db import ( + DatabaseError, + NotSupportedError, + connection, + connections, + router, + transaction, +) +from django.test import ( + TransactionTestCase, + override_settings, + skipIfDBFeature, + skipUnlessDBFeature, +) +from django.test.utils import CaptureQueriesContext + +from .models import ( + City, + CityCountryProxy, + Country, + EUCity, + EUCountry, + Person, + PersonProfile, +) + + +class SelectForUpdateTests(TransactionTestCase): + available_apps = ["select_for_update"] + + def setUp(self): + # This is executed in autocommit mode so that code in + # run_select_for_update can see this data. 
+ self.country1 = Country.objects.create(name="Belgium") + self.country2 = Country.objects.create(name="France") + self.city1 = City.objects.create(name="Liberchies", country=self.country1) + self.city2 = City.objects.create(name="Samois-sur-Seine", country=self.country2) + self.person = Person.objects.create( + name="Reinhardt", born=self.city1, died=self.city2 + ) + self.person_profile = PersonProfile.objects.create(person=self.person) + + # We need another database connection in transaction to test that one + # connection issuing a SELECT ... FOR UPDATE will block. + self.new_connection = connection.copy() + + def tearDown(self): + try: + self.end_blocking_transaction() + except (DatabaseError, AttributeError): + pass + self.new_connection.close() + + def start_blocking_transaction(self): + self.new_connection.set_autocommit(False) + # Start a blocking transaction. At some point, + # end_blocking_transaction() should be called. + self.cursor = self.new_connection.cursor() + sql = "SELECT * FROM %(db_table)s %(for_update)s;" % { + "db_table": Person._meta.db_table, + "for_update": self.new_connection.ops.for_update_sql(), + } + self.cursor.execute(sql, ()) + self.cursor.fetchone() + + def end_blocking_transaction(self): + # Roll back the blocking transaction. + self.cursor.close() + self.new_connection.rollback() + self.new_connection.set_autocommit(True) + + def has_for_update_sql(self, queries, **kwargs): + # Examine the SQL that was executed to determine whether it + # contains the 'SELECT..FOR UPDATE' stanza. + for_update_sql = connection.ops.for_update_sql(**kwargs) + return any(for_update_sql in query["sql"] for query in queries) + + @skipUnlessDBFeature("has_select_for_update") + def test_for_update_sql_generated(self): + """ + The backend's FOR UPDATE variant appears in + generated SQL when select_for_update is invoked. 
+ """ + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list(Person.objects.select_for_update()) + self.assertTrue(self.has_for_update_sql(ctx.captured_queries)) + + @skipUnlessDBFeature("has_select_for_update_nowait") + def test_for_update_sql_generated_nowait(self): + """ + The backend's FOR UPDATE NOWAIT variant appears in + generated SQL when select_for_update is invoked. + """ + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list(Person.objects.select_for_update(nowait=True)) + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, nowait=True)) + + @skipUnlessDBFeature("has_select_for_update_skip_locked") + def test_for_update_sql_generated_skip_locked(self): + """ + The backend's FOR UPDATE SKIP LOCKED variant appears in + generated SQL when select_for_update is invoked. + """ + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list(Person.objects.select_for_update(skip_locked=True)) + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, skip_locked=True)) + + @skipUnlessDBFeature("has_select_for_no_key_update") + def test_update_sql_generated_no_key(self): + """ + The backend's FOR NO KEY UPDATE variant appears in generated SQL when + select_for_update() is invoked. + """ + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list(Person.objects.select_for_update(no_key=True)) + self.assertIs(self.has_for_update_sql(ctx.captured_queries, no_key=True), True) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_sql_generated_of(self): + """ + The backend's FOR UPDATE OF variant appears in the generated SQL when + select_for_update() is invoked. 
+ """ + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list( + Person.objects.select_related( + "born__country", + ) + .select_for_update( + of=("born__country",), + ) + .select_for_update(of=("self", "born__country")) + ) + features = connections["default"].features + if features.select_for_update_of_column: + expected = [ + 'select_for_update_person"."id', + 'select_for_update_country"."entity_ptr_id', + ] + else: + expected = ["select_for_update_person", "select_for_update_country"] + expected = [connection.ops.quote_name(value) for value in expected] + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_sql_model_inheritance_generated_of(self): + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list(EUCountry.objects.select_for_update(of=("self",))) + if connection.features.select_for_update_of_column: + expected = ['select_for_update_eucountry"."country_ptr_id'] + else: + expected = ["select_for_update_eucountry"] + expected = [connection.ops.quote_name(value) for value in expected] + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_sql_model_inheritance_ptr_generated_of(self): + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list( + EUCountry.objects.select_for_update( + of=( + "self", + "country_ptr", + ) + ) + ) + if connection.features.select_for_update_of_column: + expected = [ + 'select_for_update_eucountry"."country_ptr_id', + 'select_for_update_country"."entity_ptr_id', + ] + else: + expected = ["select_for_update_eucountry", "select_for_update_country"] + expected = [connection.ops.quote_name(value) for value in expected] + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) + + @skipUnlessDBFeature("has_select_for_update_of") + def 
test_for_update_sql_related_model_inheritance_generated_of(self): + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list( + EUCity.objects.select_related("country").select_for_update( + of=("self", "country"), + ) + ) + if connection.features.select_for_update_of_column: + expected = [ + 'select_for_update_eucity"."id', + 'select_for_update_eucountry"."country_ptr_id', + ] + else: + expected = ["select_for_update_eucity", "select_for_update_eucountry"] + expected = [connection.ops.quote_name(value) for value in expected] + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_sql_model_inheritance_nested_ptr_generated_of(self): + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list( + EUCity.objects.select_related("country").select_for_update( + of=( + "self", + "country__country_ptr", + ), + ) + ) + if connection.features.select_for_update_of_column: + expected = [ + 'select_for_update_eucity"."id', + 'select_for_update_country"."entity_ptr_id', + ] + else: + expected = ["select_for_update_eucity", "select_for_update_country"] + expected = [connection.ops.quote_name(value) for value in expected] + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_sql_multilevel_model_inheritance_ptr_generated_of(self): + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list( + EUCountry.objects.select_for_update( + of=("country_ptr", "country_ptr__entity_ptr"), + ) + ) + if connection.features.select_for_update_of_column: + expected = [ + 'select_for_update_country"."entity_ptr_id', + 'select_for_update_entity"."id', + ] + else: + expected = ["select_for_update_country", "select_for_update_entity"] + expected = [connection.ops.quote_name(value) for value in expected] + 
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_sql_model_proxy_generated_of(self): + with transaction.atomic(), CaptureQueriesContext(connection) as ctx: + list( + CityCountryProxy.objects.select_related("country").select_for_update( + of=("country",), + ) + ) + if connection.features.select_for_update_of_column: + expected = ['select_for_update_country"."entity_ptr_id'] + else: + expected = ["select_for_update_country"] + expected = [connection.ops.quote_name(value) for value in expected] + self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_of_followed_by_values(self): + with transaction.atomic(): + values = list(Person.objects.select_for_update(of=("self",)).values("pk")) + self.assertEqual(values, [{"pk": self.person.pk}]) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_of_followed_by_values_list(self): + with transaction.atomic(): + values = list( + Person.objects.select_for_update(of=("self",)).values_list("pk") + ) + self.assertEqual(values, [(self.person.pk,)]) + + @skipUnlessDBFeature("has_select_for_update_of") + def test_for_update_of_self_when_self_is_not_selected(self): + """ + select_for_update(of=['self']) when the only columns selected are from + related tables. 
+ """ + with transaction.atomic(): + values = list( + Person.objects.select_related("born") + .select_for_update(of=("self",)) + .values("born__name") + ) + self.assertEqual(values, [{"born__name": self.city1.name}]) + + @skipUnlessDBFeature( + "has_select_for_update_of", + "supports_select_for_update_with_limit", + ) + def test_for_update_of_with_exists(self): + with transaction.atomic(): + qs = Person.objects.select_for_update(of=("self", "born")) + self.assertIs(qs.exists(), True) + + @skipUnlessDBFeature("has_select_for_update_nowait", "supports_transactions") + def test_nowait_raises_error_on_block(self): + """ + If nowait is specified, we expect an error to be raised rather + than blocking. + """ + self.start_blocking_transaction() + status = [] + + thread = threading.Thread( + target=self.run_select_for_update, + args=(status,), + kwargs={"nowait": True}, + ) + + thread.start() + time.sleep(1) + thread.join() + self.end_blocking_transaction() + self.assertIsInstance(status[-1], DatabaseError) + + @skipUnlessDBFeature("has_select_for_update_skip_locked", "supports_transactions") + def test_skip_locked_skips_locked_rows(self): + """ + If skip_locked is specified, the locked row is skipped resulting in + Person.DoesNotExist. + """ + self.start_blocking_transaction() + status = [] + thread = threading.Thread( + target=self.run_select_for_update, + args=(status,), + kwargs={"skip_locked": True}, + ) + thread.start() + time.sleep(1) + thread.join() + self.end_blocking_transaction() + self.assertIsInstance(status[-1], Person.DoesNotExist) + + @skipIfDBFeature("has_select_for_update_nowait") + @skipUnlessDBFeature("has_select_for_update") + def test_unsupported_nowait_raises_error(self): + """ + NotSupportedError is raised if a SELECT...FOR UPDATE NOWAIT is run on + a database backend that supports FOR UPDATE but not NOWAIT. + """ + with self.assertRaisesMessage( + NotSupportedError, "NOWAIT is not supported on this database backend." 
+ ): + with transaction.atomic(): + Person.objects.select_for_update(nowait=True).get() + + @skipIfDBFeature("has_select_for_update_skip_locked") + @skipUnlessDBFeature("has_select_for_update") + def test_unsupported_skip_locked_raises_error(self): + """ + NotSupportedError is raised if a SELECT...FOR UPDATE SKIP LOCKED is run + on a database backend that supports FOR UPDATE but not SKIP LOCKED. + """ + with self.assertRaisesMessage( + NotSupportedError, "SKIP LOCKED is not supported on this database backend." + ): + with transaction.atomic(): + Person.objects.select_for_update(skip_locked=True).get() + + @skipIfDBFeature("has_select_for_update_of") + @skipUnlessDBFeature("has_select_for_update") + def test_unsupported_of_raises_error(self): + """ + NotSupportedError is raised if a SELECT...FOR UPDATE OF... is run on + a database backend that supports FOR UPDATE but not OF. + """ + msg = "FOR UPDATE OF is not supported on this database backend." + with self.assertRaisesMessage(NotSupportedError, msg): + with transaction.atomic(): + Person.objects.select_for_update(of=("self",)).get() + + @skipIfDBFeature("has_select_for_no_key_update") + @skipUnlessDBFeature("has_select_for_update") + def test_unsuported_no_key_raises_error(self): + """ + NotSupportedError is raised if a SELECT...FOR NO KEY UPDATE... is run + on a database backend that supports FOR UPDATE but not NO KEY. + """ + msg = "FOR NO KEY UPDATE is not supported on this database backend." + with self.assertRaisesMessage(NotSupportedError, msg): + with transaction.atomic(): + Person.objects.select_for_update(no_key=True).get() + + @skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of") + def test_unrelated_of_argument_raises_error(self): + """ + FieldError is raised if a non-relation field is specified in of=(...). + """ + msg = ( + "Invalid field name(s) given in select_for_update(of=(...)): %s. " + "Only relational fields followed in the query are allowed. 
" + "Choices are: self, born, born__country, " + "born__country__entity_ptr." + ) + invalid_of = [ + ("nonexistent",), + ("name",), + ("born__nonexistent",), + ("born__name",), + ("born__nonexistent", "born__name"), + ] + for of in invalid_of: + with self.subTest(of=of): + with self.assertRaisesMessage(FieldError, msg % ", ".join(of)): + with transaction.atomic(): + Person.objects.select_related( + "born__country" + ).select_for_update(of=of).get() + + @skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of") + def test_related_but_unselected_of_argument_raises_error(self): + """ + FieldError is raised if a relation field that is not followed in the + query is specified in of=(...). + """ + msg = ( + "Invalid field name(s) given in select_for_update(of=(...)): %s. " + "Only relational fields followed in the query are allowed. " + "Choices are: self, born, profile." + ) + for name in ["born__country", "died", "died__country"]: + with self.subTest(name=name): + with self.assertRaisesMessage(FieldError, msg % name): + with transaction.atomic(): + Person.objects.select_related("born", "profile").exclude( + profile=None + ).select_for_update(of=(name,)).get() + + @skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of") + def test_model_inheritance_of_argument_raises_error_ptr_in_choices(self): + msg = ( + "Invalid field name(s) given in select_for_update(of=(...)): " + "name. Only relational fields followed in the query are allowed. " + "Choices are: self, %s." 
+ ) + with self.assertRaisesMessage( + FieldError, + msg % "country, country__country_ptr, country__country_ptr__entity_ptr", + ): + with transaction.atomic(): + EUCity.objects.select_related( + "country", + ).select_for_update(of=("name",)).get() + with self.assertRaisesMessage( + FieldError, msg % "country_ptr, country_ptr__entity_ptr" + ): + with transaction.atomic(): + EUCountry.objects.select_for_update(of=("name",)).get() + + @skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of") + def test_model_proxy_of_argument_raises_error_proxy_field_in_choices(self): + msg = ( + "Invalid field name(s) given in select_for_update(of=(...)): " + "name. Only relational fields followed in the query are allowed. " + "Choices are: self, country, country__entity_ptr." + ) + with self.assertRaisesMessage(FieldError, msg): + with transaction.atomic(): + CityCountryProxy.objects.select_related( + "country", + ).select_for_update(of=("name",)).get() + + @skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of") + def test_reverse_one_to_one_of_arguments(self): + """ + Reverse OneToOneFields may be included in of=(...) as long as NULLs + are excluded because LEFT JOIN isn't allowed in SELECT FOR UPDATE. 
+ """ + with transaction.atomic(): + person = ( + Person.objects.select_related( + "profile", + ) + .exclude(profile=None) + .select_for_update(of=("profile",)) + .get() + ) + self.assertEqual(person.profile, self.person_profile) + + @skipUnlessDBFeature("has_select_for_update") + def test_for_update_after_from(self): + features_class = connections["default"].features.__class__ + attribute_to_patch = "%s.%s.for_update_after_from" % ( + features_class.__module__, + features_class.__name__, + ) + with mock.patch(attribute_to_patch, return_value=True): + with transaction.atomic(): + self.assertIn( + "FOR UPDATE WHERE", + str(Person.objects.filter(name="foo").select_for_update().query), + ) + + @skipUnlessDBFeature("has_select_for_update", "supports_transactions") + def test_for_update_requires_transaction(self): + """ + A TransactionManagementError is raised + when a select_for_update query is executed outside of a transaction. + """ + msg = "select_for_update cannot be used outside of a transaction." + with self.assertRaisesMessage(transaction.TransactionManagementError, msg): + list(Person.objects.select_for_update()) + + @skipUnlessDBFeature("has_select_for_update", "supports_transactions") + def test_for_update_requires_transaction_only_in_execution(self): + """ + No TransactionManagementError is raised + when select_for_update is invoked outside of a transaction - + only when the query is executed. + """ + people = Person.objects.select_for_update() + msg = "select_for_update cannot be used outside of a transaction." 
+ with self.assertRaisesMessage(transaction.TransactionManagementError, msg): + list(people) + + @skipUnlessDBFeature("supports_select_for_update_with_limit") + def test_select_for_update_with_limit(self): + other = Person.objects.create(name="Grappeli", born=self.city1, died=self.city2) + with transaction.atomic(): + qs = list(Person.objects.order_by("pk").select_for_update()[1:2]) + self.assertEqual(qs[0], other) + + @skipIfDBFeature("supports_select_for_update_with_limit") + def test_unsupported_select_for_update_with_limit(self): + msg = ( + "LIMIT/OFFSET is not supported with select_for_update on this database " + "backend." + ) + with self.assertRaisesMessage(NotSupportedError, msg): + with transaction.atomic(): + list(Person.objects.order_by("pk").select_for_update()[1:2]) + + def run_select_for_update(self, status, **kwargs): + """ + Utility method that runs a SELECT FOR UPDATE against all + Person instances. After the select_for_update, it attempts + to update the name of the only record, save, and commit. + + This function expects to run in a separate thread. + """ + status.append("started") + try: + # We need to enter transaction management again, as this is done on + # per-thread basis + with transaction.atomic(): + person = Person.objects.select_for_update(**kwargs).get() + person.name = "Fred" + person.save() + except (DatabaseError, Person.DoesNotExist) as e: + status.append(e) + finally: + # This method is run in a separate thread. It uses its own + # database connection. Close it without waiting for the GC. + connection.close() + + @skipUnlessDBFeature("has_select_for_update") + @skipUnlessDBFeature("supports_transactions") + def test_block(self): + """ + A thread running a select_for_update that accesses rows being touched + by a similar operation on another connection blocks correctly. + """ + # First, let's start the transaction in our thread. 
+ self.start_blocking_transaction() + + # Now, try it again using the ORM's select_for_update + # facility. Do this in a separate thread. + status = [] + thread = threading.Thread(target=self.run_select_for_update, args=(status,)) + + # The thread should immediately block, but we'll sleep + # for a bit to make sure. + thread.start() + sanity_count = 0 + while len(status) != 1 and sanity_count < 10: + sanity_count += 1 + time.sleep(1) + if sanity_count >= 10: + raise ValueError("Thread did not run and block") + + # Check the person hasn't been updated. Since this isn't + # using FOR UPDATE, it won't block. + p = Person.objects.get(pk=self.person.pk) + self.assertEqual("Reinhardt", p.name) + + # When we end our blocking transaction, our thread should + # be able to continue. + self.end_blocking_transaction() + thread.join(5.0) + + # Check the thread has finished. Assuming it has, we should + # find that it has updated the person's name. + self.assertFalse(thread.is_alive()) + + # We must commit the transaction to ensure that MySQL gets a fresh read, + # since by default it runs in REPEATABLE READ mode + transaction.commit() + + p = Person.objects.get(pk=self.person.pk) + self.assertEqual("Fred", p.name) + + @skipUnlessDBFeature("has_select_for_update", "supports_transactions") + def test_raw_lock_not_available(self): + """ + Running a raw query which can't obtain a FOR UPDATE lock raises + the correct exception + """ + self.start_blocking_transaction() + + def raw(status): + try: + list( + Person.objects.raw( + "SELECT * FROM %s %s" + % ( + Person._meta.db_table, + connection.ops.for_update_sql(nowait=True), + ) + ) + ) + except DatabaseError as e: + status.append(e) + finally: + # This method is run in a separate thread. It uses its own + # database connection. Close it without waiting for the GC. + # Connection cannot be closed on Oracle because cursor is still + # open. 
+ if connection.vendor != "oracle": + connection.close() + + status = [] + thread = threading.Thread(target=raw, kwargs={"status": status}) + thread.start() + time.sleep(1) + thread.join() + self.end_blocking_transaction() + self.assertIsInstance(status[-1], DatabaseError) + + @skipUnlessDBFeature("has_select_for_update") + @override_settings(DATABASE_ROUTERS=[TestRouter()]) + def test_select_for_update_on_multidb(self): + query = Person.objects.select_for_update() + self.assertEqual(router.db_for_write(Person), query.db) + + @skipUnlessDBFeature("has_select_for_update") + def test_select_for_update_with_get(self): + with transaction.atomic(): + person = Person.objects.select_for_update().get(name="Reinhardt") + self.assertEqual(person.name, "Reinhardt") + + def test_nowait_and_skip_locked(self): + with self.assertRaisesMessage( + ValueError, "The nowait option cannot be used with skip_locked." + ): + Person.objects.select_for_update(nowait=True, skip_locked=True) + + def test_ordered_select_for_update(self): + """ + Subqueries should respect ordering as an ORDER BY clause may be useful + to specify a row locking order to prevent deadlocks (#27193). 
+ """ + with transaction.atomic(): + qs = Person.objects.filter( + id__in=Person.objects.order_by("-id").select_for_update() + ) + self.assertIn("ORDER BY", str(qs.query)) diff --git a/testbed/django__django/tests/select_related/__init__.py b/testbed/django__django/tests/select_related/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/select_related/models.py b/testbed/django__django/tests/select_related/models.py new file mode 100644 index 0000000000000000000000000000000000000000..d407dbdb110d6557449c0be8f1bf041d6ee1759f --- /dev/null +++ b/testbed/django__django/tests/select_related/models.py @@ -0,0 +1,86 @@ +""" +Tests for select_related() + +``select_related()`` follows all relationships and pre-caches any foreign key +values so that complex trees can be fetched in a single query. However, this +isn't always a good idea, so the ``depth`` argument control how many "levels" +the select-related behavior will traverse. +""" + +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation +from django.contrib.contenttypes.models import ContentType +from django.db import models + +# Who remembers high school biology? 
+ + +class Domain(models.Model): + name = models.CharField(max_length=50) + + +class Kingdom(models.Model): + name = models.CharField(max_length=50) + domain = models.ForeignKey(Domain, models.CASCADE) + + +class Phylum(models.Model): + name = models.CharField(max_length=50) + kingdom = models.ForeignKey(Kingdom, models.CASCADE) + + +class Klass(models.Model): + name = models.CharField(max_length=50) + phylum = models.ForeignKey(Phylum, models.CASCADE) + + +class Order(models.Model): + name = models.CharField(max_length=50) + klass = models.ForeignKey(Klass, models.CASCADE) + + +class Family(models.Model): + name = models.CharField(max_length=50) + order = models.ForeignKey(Order, models.CASCADE) + + +class Genus(models.Model): + name = models.CharField(max_length=50) + family = models.ForeignKey(Family, models.CASCADE) + + +class Species(models.Model): + name = models.CharField(max_length=50) + genus = models.ForeignKey(Genus, models.CASCADE) + + +# and we'll invent a new thing so we have a model with two foreign keys + + +class HybridSpecies(models.Model): + name = models.CharField(max_length=50) + parent_1 = models.ForeignKey(Species, models.CASCADE, related_name="child_1") + parent_2 = models.ForeignKey(Species, models.CASCADE, related_name="child_2") + + +class Topping(models.Model): + name = models.CharField(max_length=30) + + +class Pizza(models.Model): + name = models.CharField(max_length=100) + toppings = models.ManyToManyField(Topping) + + +class TaggedItem(models.Model): + tag = models.CharField(max_length=30) + + content_type = models.ForeignKey( + ContentType, models.CASCADE, related_name="select_related_tagged_items" + ) + object_id = models.PositiveIntegerField() + content_object = GenericForeignKey("content_type", "object_id") + + +class Bookmark(models.Model): + url = models.URLField() + tags = GenericRelation(TaggedItem) diff --git a/testbed/django__django/tests/select_related/tests.py b/testbed/django__django/tests/select_related/tests.py new 
file mode 100644 index 0000000000000000000000000000000000000000..68fe7a906fb1260888c3ac1d380d70156e3e9202 --- /dev/null +++ b/testbed/django__django/tests/select_related/tests.py @@ -0,0 +1,284 @@ +from django.core.exceptions import FieldError +from django.test import SimpleTestCase, TestCase + +from .models import ( + Bookmark, + Domain, + Family, + Genus, + HybridSpecies, + Kingdom, + Klass, + Order, + Phylum, + Pizza, + Species, + TaggedItem, +) + + +class SelectRelatedTests(TestCase): + @classmethod + def create_tree(cls, stringtree): + """ + Helper to create a complete tree. + """ + names = stringtree.split() + models = [Domain, Kingdom, Phylum, Klass, Order, Family, Genus, Species] + assert len(names) == len(models), (names, models) + + parent = None + for name, model in zip(names, models): + try: + obj = model.objects.get(name=name) + except model.DoesNotExist: + obj = model(name=name) + if parent: + setattr(obj, parent.__class__.__name__.lower(), parent) + obj.save() + parent = obj + + @classmethod + def setUpTestData(cls): + cls.create_tree( + "Eukaryota Animalia Anthropoda Insecta Diptera Drosophilidae Drosophila " + "melanogaster" + ) + cls.create_tree( + "Eukaryota Animalia Chordata Mammalia Primates Hominidae Homo sapiens" + ) + cls.create_tree( + "Eukaryota Plantae Magnoliophyta Magnoliopsida Fabales Fabaceae Pisum " + "sativum" + ) + cls.create_tree( + "Eukaryota Fungi Basidiomycota Homobasidiomycatae Agaricales Amanitacae " + "Amanita muscaria" + ) + + def test_access_fks_without_select_related(self): + """ + Normally, accessing FKs doesn't fill in related objects + """ + with self.assertNumQueries(8): + fly = Species.objects.get(name="melanogaster") + domain = fly.genus.family.order.klass.phylum.kingdom.domain + self.assertEqual(domain.name, "Eukaryota") + + def test_access_fks_with_select_related(self): + """ + A select_related() call will fill in those related objects without any + extra queries + """ + with self.assertNumQueries(1): + person = 
Species.objects.select_related( + "genus__family__order__klass__phylum__kingdom__domain" + ).get(name="sapiens") + domain = person.genus.family.order.klass.phylum.kingdom.domain + self.assertEqual(domain.name, "Eukaryota") + + def test_list_without_select_related(self): + with self.assertNumQueries(9): + world = Species.objects.all() + families = [o.genus.family.name for o in world] + self.assertEqual( + sorted(families), + [ + "Amanitacae", + "Drosophilidae", + "Fabaceae", + "Hominidae", + ], + ) + + def test_list_with_select_related(self): + """select_related() applies to entire lists, not just items.""" + with self.assertNumQueries(1): + world = Species.objects.select_related() + families = [o.genus.family.name for o in world] + self.assertEqual( + sorted(families), + [ + "Amanitacae", + "Drosophilidae", + "Fabaceae", + "Hominidae", + ], + ) + + def test_list_with_depth(self): + """ + Passing a relationship field lookup specifier to select_related() will + stop the descent at a particular level. This can be used on lists as + well. + """ + with self.assertNumQueries(5): + world = Species.objects.select_related("genus__family") + orders = [o.genus.family.order.name for o in world] + self.assertEqual( + sorted(orders), ["Agaricales", "Diptera", "Fabales", "Primates"] + ) + + def test_select_related_with_extra(self): + s = ( + Species.objects.all() + .select_related() + .extra(select={"a": "select_related_species.id + 10"})[0] + ) + self.assertEqual(s.id + 10, s.a) + + def test_certain_fields(self): + """ + The optional fields passed to select_related() control which related + models we pull in. This allows for smaller queries. + + In this case, we explicitly say to select the 'genus' and + 'genus.family' models, leading to the same number of queries as before. 
+ """ + with self.assertNumQueries(1): + world = Species.objects.select_related("genus__family") + families = [o.genus.family.name for o in world] + self.assertEqual( + sorted(families), + ["Amanitacae", "Drosophilidae", "Fabaceae", "Hominidae"], + ) + + def test_more_certain_fields(self): + """ + In this case, we explicitly say to select the 'genus' and + 'genus.family' models, leading to the same number of queries as before. + """ + with self.assertNumQueries(2): + world = Species.objects.filter(genus__name="Amanita").select_related( + "genus__family" + ) + orders = [o.genus.family.order.name for o in world] + self.assertEqual(orders, ["Agaricales"]) + + def test_field_traversal(self): + with self.assertNumQueries(1): + s = ( + Species.objects.all() + .select_related("genus__family__order") + .order_by("id")[0:1] + .get() + .genus.family.order.name + ) + self.assertEqual(s, "Diptera") + + def test_none_clears_list(self): + queryset = Species.objects.select_related("genus").select_related(None) + self.assertIs(queryset.query.select_related, False) + + def test_chaining(self): + parent_1, parent_2 = Species.objects.all()[:2] + HybridSpecies.objects.create( + name="hybrid", parent_1=parent_1, parent_2=parent_2 + ) + queryset = HybridSpecies.objects.select_related("parent_1").select_related( + "parent_2" + ) + with self.assertNumQueries(1): + obj = queryset[0] + self.assertEqual(obj.parent_1, parent_1) + self.assertEqual(obj.parent_2, parent_2) + + def test_reverse_relation_caching(self): + species = ( + Species.objects.select_related("genus").filter(name="melanogaster").first() + ) + with self.assertNumQueries(0): + self.assertEqual(species.genus.name, "Drosophila") + # The species_set reverse relation isn't cached. 
+ self.assertEqual(species.genus._state.fields_cache, {}) + with self.assertNumQueries(1): + self.assertEqual(species.genus.species_set.first().name, "melanogaster") + + def test_select_related_after_values(self): + """ + Running select_related() after calling values() raises a TypeError + """ + message = "Cannot call select_related() after .values() or .values_list()" + with self.assertRaisesMessage(TypeError, message): + list(Species.objects.values("name").select_related("genus")) + + def test_select_related_after_values_list(self): + """ + Running select_related() after calling values_list() raises a TypeError + """ + message = "Cannot call select_related() after .values() or .values_list()" + with self.assertRaisesMessage(TypeError, message): + list(Species.objects.values_list("name").select_related("genus")) + + +class SelectRelatedValidationTests(SimpleTestCase): + """ + select_related() should thrown an error on fields that do not exist and + non-relational fields. + """ + + non_relational_error = ( + "Non-relational field given in select_related: '%s'. Choices are: %s" + ) + invalid_error = ( + "Invalid field name(s) given in select_related: '%s'. 
Choices are: %s" + ) + + def test_non_relational_field(self): + with self.assertRaisesMessage( + FieldError, self.non_relational_error % ("name", "genus") + ): + list(Species.objects.select_related("name__some_field")) + + with self.assertRaisesMessage( + FieldError, self.non_relational_error % ("name", "genus") + ): + list(Species.objects.select_related("name")) + + with self.assertRaisesMessage( + FieldError, self.non_relational_error % ("name", "(none)") + ): + list(Domain.objects.select_related("name")) + + def test_non_relational_field_nested(self): + with self.assertRaisesMessage( + FieldError, self.non_relational_error % ("name", "family") + ): + list(Species.objects.select_related("genus__name")) + + def test_many_to_many_field(self): + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("toppings", "(none)") + ): + list(Pizza.objects.select_related("toppings")) + + def test_reverse_relational_field(self): + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("child_1", "genus") + ): + list(Species.objects.select_related("child_1")) + + def test_invalid_field(self): + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("invalid_field", "genus") + ): + list(Species.objects.select_related("invalid_field")) + + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("related_invalid_field", "family") + ): + list(Species.objects.select_related("genus__related_invalid_field")) + + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("invalid_field", "(none)") + ): + list(Domain.objects.select_related("invalid_field")) + + def test_generic_relations(self): + with self.assertRaisesMessage(FieldError, self.invalid_error % ("tags", "")): + list(Bookmark.objects.select_related("tags")) + + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("content_object", "content_type") + ): + list(TaggedItem.objects.select_related("content_object")) diff --git 
a/testbed/django__django/tests/select_related_onetoone/__init__.py b/testbed/django__django/tests/select_related_onetoone/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/select_related_onetoone/models.py b/testbed/django__django/tests/select_related_onetoone/models.py new file mode 100644 index 0000000000000000000000000000000000000000..5ffb6bfd8c88fcbad05ce998ffee777a91b47f44 --- /dev/null +++ b/testbed/django__django/tests/select_related_onetoone/models.py @@ -0,0 +1,78 @@ +from django.db import models + + +class User(models.Model): + username = models.CharField(max_length=100) + email = models.EmailField() + + +class UserProfile(models.Model): + user = models.OneToOneField(User, models.CASCADE) + city = models.CharField(max_length=100) + state = models.CharField(max_length=2) + + +class UserStatResult(models.Model): + results = models.CharField(max_length=50) + + +class UserStat(models.Model): + user = models.OneToOneField(User, models.CASCADE, primary_key=True) + posts = models.IntegerField() + results = models.ForeignKey(UserStatResult, models.CASCADE) + + +class StatDetails(models.Model): + base_stats = models.OneToOneField(UserStat, models.CASCADE) + comments = models.IntegerField() + + +class AdvancedUserStat(UserStat): + karma = models.IntegerField() + + +class Image(models.Model): + name = models.CharField(max_length=100) + + +class Product(models.Model): + name = models.CharField(max_length=100) + image = models.OneToOneField(Image, models.SET_NULL, null=True) + + +class Parent1(models.Model): + name1 = models.CharField(max_length=50) + + +class Parent2(models.Model): + # Avoid having two "id" fields in the Child1 subclass + id2 = models.AutoField(primary_key=True) + name2 = models.CharField(max_length=50) + + +class Child1(Parent1, Parent2): + value = models.IntegerField() + + +class Child2(Parent1): + parent2 = 
models.OneToOneField(Parent2, models.CASCADE) + value = models.IntegerField() + + +class Child3(Child2): + value3 = models.IntegerField() + + +class Child4(Child1): + value4 = models.IntegerField() + + +class LinkedList(models.Model): + name = models.CharField(max_length=50) + previous_item = models.OneToOneField( + "self", + models.CASCADE, + related_name="next_item", + blank=True, + null=True, + ) diff --git a/testbed/django__django/tests/select_related_onetoone/tests.py b/testbed/django__django/tests/select_related_onetoone/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..83462ed071669473292365ac8a48b0d887c87e2c --- /dev/null +++ b/testbed/django__django/tests/select_related_onetoone/tests.py @@ -0,0 +1,301 @@ +from django.core.exceptions import FieldError +from django.db.models import FilteredRelation +from django.test import SimpleTestCase, TestCase + +from .models import ( + AdvancedUserStat, + Child1, + Child2, + Child3, + Child4, + Image, + LinkedList, + Parent1, + Parent2, + Product, + StatDetails, + User, + UserProfile, + UserStat, + UserStatResult, +) + + +class ReverseSelectRelatedTestCase(TestCase): + @classmethod + def setUpTestData(cls): + user = User.objects.create(username="test") + UserProfile.objects.create(user=user, state="KS", city="Lawrence") + results = UserStatResult.objects.create(results="first results") + userstat = UserStat.objects.create(user=user, posts=150, results=results) + StatDetails.objects.create(base_stats=userstat, comments=259) + + user2 = User.objects.create(username="bob") + results2 = UserStatResult.objects.create(results="moar results") + advstat = AdvancedUserStat.objects.create( + user=user2, posts=200, karma=5, results=results2 + ) + StatDetails.objects.create(base_stats=advstat, comments=250) + p1 = Parent1(name1="Only Parent1") + p1.save() + c1 = Child1(name1="Child1 Parent1", name2="Child1 Parent2", value=1) + c1.save() + p2 = Parent2(name2="Child2 Parent2") + p2.save() + c2 = 
Child2(name1="Child2 Parent1", parent2=p2, value=2) + c2.save() + + def test_basic(self): + with self.assertNumQueries(1): + u = User.objects.select_related("userprofile").get(username="test") + self.assertEqual(u.userprofile.state, "KS") + + def test_follow_next_level(self): + with self.assertNumQueries(1): + u = User.objects.select_related("userstat__results").get(username="test") + self.assertEqual(u.userstat.posts, 150) + self.assertEqual(u.userstat.results.results, "first results") + + def test_follow_two(self): + with self.assertNumQueries(1): + u = User.objects.select_related("userprofile", "userstat").get( + username="test" + ) + self.assertEqual(u.userprofile.state, "KS") + self.assertEqual(u.userstat.posts, 150) + + def test_follow_two_next_level(self): + with self.assertNumQueries(1): + u = User.objects.select_related( + "userstat__results", "userstat__statdetails" + ).get(username="test") + self.assertEqual(u.userstat.results.results, "first results") + self.assertEqual(u.userstat.statdetails.comments, 259) + + def test_forward_and_back(self): + with self.assertNumQueries(1): + stat = UserStat.objects.select_related("user__userprofile").get( + user__username="test" + ) + self.assertEqual(stat.user.userprofile.state, "KS") + self.assertEqual(stat.user.userstat.posts, 150) + + def test_back_and_forward(self): + with self.assertNumQueries(1): + u = User.objects.select_related("userstat").get(username="test") + self.assertEqual(u.userstat.user.username, "test") + + def test_not_followed_by_default(self): + with self.assertNumQueries(2): + u = User.objects.select_related().get(username="test") + self.assertEqual(u.userstat.posts, 150) + + def test_follow_from_child_class(self): + with self.assertNumQueries(1): + stat = AdvancedUserStat.objects.select_related("user", "statdetails").get( + posts=200 + ) + self.assertEqual(stat.statdetails.comments, 250) + self.assertEqual(stat.user.username, "bob") + + def test_follow_inheritance(self): + with 
self.assertNumQueries(1): + stat = UserStat.objects.select_related("user", "advanceduserstat").get( + posts=200 + ) + self.assertEqual(stat.advanceduserstat.posts, 200) + self.assertEqual(stat.user.username, "bob") + with self.assertNumQueries(0): + self.assertEqual(stat.advanceduserstat.user.username, "bob") + + def test_nullable_relation(self): + im = Image.objects.create(name="imag1") + p1 = Product.objects.create(name="Django Plushie", image=im) + p2 = Product.objects.create(name="Talking Django Plushie") + + with self.assertNumQueries(1): + result = sorted( + Product.objects.select_related("image"), key=lambda x: x.name + ) + self.assertEqual( + [p.name for p in result], ["Django Plushie", "Talking Django Plushie"] + ) + + self.assertEqual(p1.image, im) + # Check for ticket #13839 + self.assertIsNone(p2.image) + + def test_missing_reverse(self): + """ + Ticket #13839: select_related() should NOT cache None + for missing objects on a reverse 1-1 relation. + """ + with self.assertNumQueries(1): + user = User.objects.select_related("userprofile").get(username="bob") + with self.assertRaises(UserProfile.DoesNotExist): + user.userprofile + + def test_nullable_missing_reverse(self): + """ + Ticket #13839: select_related() should NOT cache None + for missing objects on a reverse 0-1 relation. 
+ """ + Image.objects.create(name="imag1") + + with self.assertNumQueries(1): + image = Image.objects.select_related("product").get() + with self.assertRaises(Product.DoesNotExist): + image.product + + def test_parent_only(self): + with self.assertNumQueries(1): + p = Parent1.objects.select_related("child1").get(name1="Only Parent1") + with self.assertNumQueries(0): + with self.assertRaises(Child1.DoesNotExist): + p.child1 + + def test_multiple_subclass(self): + with self.assertNumQueries(1): + p = Parent1.objects.select_related("child1").get(name1="Child1 Parent1") + self.assertEqual(p.child1.name2, "Child1 Parent2") + + def test_onetoone_with_subclass(self): + with self.assertNumQueries(1): + p = Parent2.objects.select_related("child2").get(name2="Child2 Parent2") + self.assertEqual(p.child2.name1, "Child2 Parent1") + + def test_onetoone_with_two_subclasses(self): + with self.assertNumQueries(1): + p = Parent2.objects.select_related("child2", "child2__child3").get( + name2="Child2 Parent2" + ) + self.assertEqual(p.child2.name1, "Child2 Parent1") + with self.assertRaises(Child3.DoesNotExist): + p.child2.child3 + p3 = Parent2(name2="Child3 Parent2") + p3.save() + c2 = Child3(name1="Child3 Parent1", parent2=p3, value=2, value3=3) + c2.save() + with self.assertNumQueries(1): + p = Parent2.objects.select_related("child2", "child2__child3").get( + name2="Child3 Parent2" + ) + self.assertEqual(p.child2.name1, "Child3 Parent1") + self.assertEqual(p.child2.child3.value3, 3) + self.assertEqual(p.child2.child3.value, p.child2.value) + self.assertEqual(p.child2.name1, p.child2.child3.name1) + + def test_multiinheritance_two_subclasses(self): + with self.assertNumQueries(1): + p = Parent1.objects.select_related("child1", "child1__child4").get( + name1="Child1 Parent1" + ) + self.assertEqual(p.child1.name2, "Child1 Parent2") + self.assertEqual(p.child1.name1, p.name1) + with self.assertRaises(Child4.DoesNotExist): + p.child1.child4 + Child4(name1="n1", name2="n2", value=1, 
value4=4).save() + with self.assertNumQueries(1): + p = Parent2.objects.select_related("child1", "child1__child4").get( + name2="n2" + ) + self.assertEqual(p.name2, "n2") + self.assertEqual(p.child1.name1, "n1") + self.assertEqual(p.child1.name2, p.name2) + self.assertEqual(p.child1.value, 1) + self.assertEqual(p.child1.child4.name1, p.child1.name1) + self.assertEqual(p.child1.child4.name2, p.child1.name2) + self.assertEqual(p.child1.child4.value, p.child1.value) + self.assertEqual(p.child1.child4.value4, 4) + + def test_inheritance_deferred(self): + c = Child4.objects.create(name1="n1", name2="n2", value=1, value4=4) + with self.assertNumQueries(1): + p = ( + Parent2.objects.select_related("child1") + .only("id2", "child1__value") + .get(name2="n2") + ) + self.assertEqual(p.id2, c.id2) + self.assertEqual(p.child1.value, 1) + p = ( + Parent2.objects.select_related("child1") + .only("id2", "child1__value") + .get(name2="n2") + ) + with self.assertNumQueries(1): + self.assertEqual(p.name2, "n2") + p = ( + Parent2.objects.select_related("child1") + .only("id2", "child1__value") + .get(name2="n2") + ) + with self.assertNumQueries(1): + self.assertEqual(p.child1.name2, "n2") + + def test_inheritance_deferred2(self): + c = Child4.objects.create(name1="n1", name2="n2", value=1, value4=4) + qs = Parent2.objects.select_related("child1", "child1__child4").only( + "id2", "child1__value", "child1__child4__value4" + ) + with self.assertNumQueries(1): + p = qs.get(name2="n2") + self.assertEqual(p.id2, c.id2) + self.assertEqual(p.child1.value, 1) + self.assertEqual(p.child1.child4.value4, 4) + self.assertEqual(p.child1.child4.id2, c.id2) + p = qs.get(name2="n2") + with self.assertNumQueries(1): + self.assertEqual(p.child1.name2, "n2") + p = qs.get(name2="n2") + with self.assertNumQueries(0): + self.assertEqual(p.child1.value, 1) + self.assertEqual(p.child1.child4.value4, 4) + with self.assertNumQueries(2): + self.assertEqual(p.child1.name1, "n1") + 
self.assertEqual(p.child1.child4.name1, "n1") + + def test_self_relation(self): + item1 = LinkedList.objects.create(name="item1") + LinkedList.objects.create(name="item2", previous_item=item1) + with self.assertNumQueries(1): + item1_db = LinkedList.objects.select_related("next_item").get(name="item1") + self.assertEqual(item1_db.next_item.name, "item2") + + +class ReverseSelectRelatedValidationTests(SimpleTestCase): + """ + Rverse related fields should be listed in the validation message when an + invalid field is given in select_related(). + """ + + non_relational_error = ( + "Non-relational field given in select_related: '%s'. Choices are: %s" + ) + invalid_error = ( + "Invalid field name(s) given in select_related: '%s'. Choices are: %s" + ) + + def test_reverse_related_validation(self): + fields = "userprofile, userstat" + + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("foobar", fields) + ): + list(User.objects.select_related("foobar")) + + with self.assertRaisesMessage( + FieldError, self.non_relational_error % ("username", fields) + ): + list(User.objects.select_related("username")) + + def test_reverse_related_validation_with_filtered_relation(self): + fields = "userprofile, userstat, relation" + with self.assertRaisesMessage( + FieldError, self.invalid_error % ("foobar", fields) + ): + list( + User.objects.annotate( + relation=FilteredRelation("userprofile") + ).select_related("foobar") + ) diff --git a/testbed/django__django/tests/select_related_regress/__init__.py b/testbed/django__django/tests/select_related_regress/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/select_related_regress/models.py b/testbed/django__django/tests/select_related_regress/models.py new file mode 100644 index 0000000000000000000000000000000000000000..9bae754196711d9f8bb3aef6402a2a4c5099144f --- /dev/null +++ 
b/testbed/django__django/tests/select_related_regress/models.py @@ -0,0 +1,144 @@ +from django.db import models + + +class Building(models.Model): + name = models.CharField(max_length=10) + + +class Device(models.Model): + building = models.ForeignKey("Building", models.CASCADE) + name = models.CharField(max_length=10) + + +class Port(models.Model): + device = models.ForeignKey("Device", models.CASCADE) + port_number = models.CharField(max_length=10) + + def __str__(self): + return "%s/%s" % (self.device.name, self.port_number) + + +class Connection(models.Model): + start = models.ForeignKey( + Port, + models.CASCADE, + related_name="connection_start", + unique=True, + ) + end = models.ForeignKey( + Port, + models.CASCADE, + related_name="connection_end", + unique=True, + ) + + +# Another non-tree hierarchy that exercises code paths similar to the above +# example, but in a slightly different configuration. + + +class TUser(models.Model): + name = models.CharField(max_length=200) + + +class Person(models.Model): + user = models.ForeignKey(TUser, models.CASCADE, unique=True) + + +class Organizer(models.Model): + person = models.ForeignKey(Person, models.CASCADE) + + +class Student(models.Model): + person = models.ForeignKey(Person, models.CASCADE) + + +class Class(models.Model): + org = models.ForeignKey(Organizer, models.CASCADE) + + +class Enrollment(models.Model): + std = models.ForeignKey(Student, models.CASCADE) + cls = models.ForeignKey(Class, models.CASCADE) + + +# Models for testing bug #8036. 
+ + +class Country(models.Model): + name = models.CharField(max_length=50) + + +class State(models.Model): + name = models.CharField(max_length=50) + country = models.ForeignKey(Country, models.CASCADE) + + +class ClientStatus(models.Model): + name = models.CharField(max_length=50) + + +class Client(models.Model): + name = models.CharField(max_length=50) + state = models.ForeignKey(State, models.SET_NULL, null=True) + status = models.ForeignKey(ClientStatus, models.CASCADE) + + +class SpecialClient(Client): + value = models.IntegerField() + + +# Some model inheritance exercises + + +class Parent(models.Model): + name = models.CharField(max_length=10) + + +class Child(Parent): + value = models.IntegerField() + + +class Item(models.Model): + name = models.CharField(max_length=10) + child = models.ForeignKey(Child, models.SET_NULL, null=True) + + def __str__(self): + return self.name + + +# Models for testing bug #19870. + + +class Fowl(models.Model): + name = models.CharField(max_length=10) + + +class Hen(Fowl): + pass + + +class Chick(Fowl): + mother = models.ForeignKey(Hen, models.CASCADE) + + +class Base(models.Model): + name = models.CharField(max_length=10) + lots_of_text = models.TextField() + + class Meta: + abstract = True + + +class A(Base): + a_field = models.CharField(max_length=10) + + +class B(Base): + b_field = models.CharField(max_length=10) + + +class C(Base): + c_a = models.ForeignKey(A, models.CASCADE) + c_b = models.ForeignKey(B, models.CASCADE) + is_published = models.BooleanField(default=False) diff --git a/testbed/django__django/tests/select_related_regress/tests.py b/testbed/django__django/tests/select_related_regress/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..94a15bde2400404ba63e29e60173d0f45017992c --- /dev/null +++ b/testbed/django__django/tests/select_related_regress/tests.py @@ -0,0 +1,257 @@ +from django.test import TestCase + +from .models import ( + A, + B, + Building, + C, + Chick, + Child, + Class, + 
Client, + ClientStatus, + Connection, + Country, + Device, + Enrollment, + Hen, + Item, + Organizer, + Person, + Port, + SpecialClient, + State, + Student, + TUser, +) + + +class SelectRelatedRegressTests(TestCase): + def test_regression_7110(self): + """ + Regression test for bug #7110. + + When using select_related(), we must query the + Device and Building tables using two different aliases (each) in order to + differentiate the start and end Connection fields. The net result is that + both the "connections = ..." queries here should give the same results + without pulling in more than the absolute minimum number of tables + (history has shown that it's easy to make a mistake in the implementation + and include some unnecessary bonus joins). + """ + + b = Building.objects.create(name="101") + dev1 = Device.objects.create(name="router", building=b) + dev2 = Device.objects.create(name="switch", building=b) + dev3 = Device.objects.create(name="server", building=b) + port1 = Port.objects.create(port_number="4", device=dev1) + port2 = Port.objects.create(port_number="7", device=dev2) + port3 = Port.objects.create(port_number="1", device=dev3) + c1 = Connection.objects.create(start=port1, end=port2) + c2 = Connection.objects.create(start=port2, end=port3) + + connections = Connection.objects.filter( + start__device__building=b, end__device__building=b + ).order_by("id") + self.assertEqual( + [(c.id, str(c.start), str(c.end)) for c in connections], + [(c1.id, "router/4", "switch/7"), (c2.id, "switch/7", "server/1")], + ) + + connections = ( + Connection.objects.filter( + start__device__building=b, end__device__building=b + ) + .select_related() + .order_by("id") + ) + self.assertEqual( + [(c.id, str(c.start), str(c.end)) for c in connections], + [(c1.id, "router/4", "switch/7"), (c2.id, "switch/7", "server/1")], + ) + + # This final query should only have seven tables (port, device and building + # twice each, plus connection once). Thus, 6 joins plus the FROM table. 
+ self.assertEqual(str(connections.query).count(" JOIN "), 6) + + def test_regression_8106(self): + """ + Regression test for bug #8106. + + Same sort of problem as the previous test, but this time there are + more extra tables to pull in as part of the select_related() and some + of them could potentially clash (so need to be kept separate). + """ + + us = TUser.objects.create(name="std") + usp = Person.objects.create(user=us) + uo = TUser.objects.create(name="org") + uop = Person.objects.create(user=uo) + s = Student.objects.create(person=usp) + o = Organizer.objects.create(person=uop) + c = Class.objects.create(org=o) + Enrollment.objects.create(std=s, cls=c) + + e_related = Enrollment.objects.select_related()[0] + self.assertEqual(e_related.std.person.user.name, "std") + self.assertEqual(e_related.cls.org.person.user.name, "org") + + def test_regression_8036(self): + """ + Regression test for bug #8036 + + the first related model in the tests below + ("state") is empty and we try to select the more remotely related + state__country. The regression here was not skipping the empty column results + for country before getting status. 
+ """ + + Country.objects.create(name="Australia") + active = ClientStatus.objects.create(name="active") + client = Client.objects.create(name="client", status=active) + + self.assertEqual(client.status, active) + self.assertEqual(Client.objects.select_related()[0].status, active) + self.assertEqual(Client.objects.select_related("state")[0].status, active) + self.assertEqual( + Client.objects.select_related("state", "status")[0].status, active + ) + self.assertEqual( + Client.objects.select_related("state__country")[0].status, active + ) + self.assertEqual( + Client.objects.select_related("state__country", "status")[0].status, active + ) + self.assertEqual(Client.objects.select_related("status")[0].status, active) + + def test_multi_table_inheritance(self): + """Exercising select_related() with multi-table model inheritance.""" + c1 = Child.objects.create(name="child1", value=42) + i1 = Item.objects.create(name="item1", child=c1) + i2 = Item.objects.create(name="item2") + + self.assertSequenceEqual( + Item.objects.select_related("child").order_by("name"), + [i1, i2], + ) + + def test_regression_12851(self): + """ + Regression for #12851 + + Deferred fields are used correctly if you select_related a subset + of fields. 
+ """ + australia = Country.objects.create(name="Australia") + active = ClientStatus.objects.create(name="active") + + wa = State.objects.create(name="Western Australia", country=australia) + Client.objects.create(name="Brian Burke", state=wa, status=active) + burke = ( + Client.objects.select_related("state") + .defer("state__name") + .get(name="Brian Burke") + ) + + self.assertEqual(burke.name, "Brian Burke") + self.assertEqual(burke.state.name, "Western Australia") + + # Still works if we're dealing with an inherited class + SpecialClient.objects.create( + name="Troy Buswell", state=wa, status=active, value=42 + ) + troy = ( + SpecialClient.objects.select_related("state") + .defer("state__name") + .get(name="Troy Buswell") + ) + + self.assertEqual(troy.name, "Troy Buswell") + self.assertEqual(troy.value, 42) + self.assertEqual(troy.state.name, "Western Australia") + + # Still works if we defer an attribute on the inherited class + troy = ( + SpecialClient.objects.select_related("state") + .defer("value", "state__name") + .get(name="Troy Buswell") + ) + + self.assertEqual(troy.name, "Troy Buswell") + self.assertEqual(troy.value, 42) + self.assertEqual(troy.state.name, "Western Australia") + + # Also works if you use only, rather than defer + troy = ( + SpecialClient.objects.select_related("state") + .only("name", "state") + .get(name="Troy Buswell") + ) + + self.assertEqual(troy.name, "Troy Buswell") + self.assertEqual(troy.value, 42) + self.assertEqual(troy.state.name, "Western Australia") + + def test_null_join_promotion(self): + australia = Country.objects.create(name="Australia") + active = ClientStatus.objects.create(name="active") + + wa = State.objects.create(name="Western Australia", country=australia) + bob = Client.objects.create(name="Bob", status=active) + jack = Client.objects.create(name="Jack", status=active, state=wa) + qs = Client.objects.filter(state=wa).select_related("state") + with self.assertNumQueries(1): + self.assertEqual(list(qs), 
[jack]) + self.assertEqual(qs[0].state, wa) + # The select_related join wasn't promoted as there was already an + # existing (even if trimmed) inner join to state. + self.assertNotIn("LEFT OUTER", str(qs.query)) + qs = Client.objects.select_related("state").order_by("name") + with self.assertNumQueries(1): + self.assertEqual(list(qs), [bob, jack]) + self.assertIs(qs[0].state, None) + self.assertEqual(qs[1].state, wa) + # The select_related join was promoted as there is already an + # existing join. + self.assertIn("LEFT OUTER", str(qs.query)) + + def test_regression_19870(self): + hen = Hen.objects.create(name="Hen") + Chick.objects.create(name="Chick", mother=hen) + + self.assertEqual(Chick.objects.all()[0].mother.name, "Hen") + self.assertEqual(Chick.objects.select_related()[0].mother.name, "Hen") + + def test_regression_10733(self): + a = A.objects.create(name="a", lots_of_text="lots_of_text_a", a_field="a_field") + b = B.objects.create(name="b", lots_of_text="lots_of_text_b", b_field="b_field") + c = C.objects.create( + name="c", lots_of_text="lots_of_text_c", is_published=True, c_a=a, c_b=b + ) + results = C.objects.only( + "name", + "lots_of_text", + "c_a", + "c_b", + "c_b__lots_of_text", + "c_a__name", + "c_b__name", + ).select_related() + self.assertSequenceEqual(results, [c]) + with self.assertNumQueries(0): + qs_c = results[0] + self.assertEqual(qs_c.name, "c") + self.assertEqual(qs_c.lots_of_text, "lots_of_text_c") + self.assertEqual(qs_c.c_b.lots_of_text, "lots_of_text_b") + self.assertEqual(qs_c.c_a.name, "a") + self.assertEqual(qs_c.c_b.name, "b") + + def test_regression_22508(self): + building = Building.objects.create(name="101") + device = Device.objects.create(name="router", building=building) + Port.objects.create(port_number="1", device=device) + + device = Device.objects.get() + port = device.port_set.select_related("device__building").get() + with self.assertNumQueries(0): + port.device.building diff --git 
a/testbed/django__django/tests/serializers/__init__.py b/testbed/django__django/tests/serializers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/serializers/models/__init__.py b/testbed/django__django/tests/serializers/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9ac2381d17ce563025db51249e96527edb29a035 --- /dev/null +++ b/testbed/django__django/tests/serializers/models/__init__.py @@ -0,0 +1,4 @@ +from .base import * # NOQA +from .data import * # NOQA +from .multi_table import * # NOQA +from .natural import * # NOQA diff --git a/testbed/django__django/tests/serializers/models/base.py b/testbed/django__django/tests/serializers/models/base.py new file mode 100644 index 0000000000000000000000000000000000000000..e9f548ad3c64c4a0ec9533377314f9b48d73c9ea --- /dev/null +++ b/testbed/django__django/tests/serializers/models/base.py @@ -0,0 +1,177 @@ +""" +Serialization + +``django.core.serializers`` provides interfaces to converting Django +``QuerySet`` objects to and from "flat" data (i.e. strings). 
+""" +from decimal import Decimal + +from django.db import models + + +class CategoryMetaDataManager(models.Manager): + def get_by_natural_key(self, kind, name): + return self.get(kind=kind, name=name) + + +class CategoryMetaData(models.Model): + kind = models.CharField(max_length=10) + name = models.CharField(max_length=10) + value = models.CharField(max_length=10) + objects = CategoryMetaDataManager() + + class Meta: + unique_together = (("kind", "name"),) + + def __str__(self): + return "[%s:%s]=%s" % (self.kind, self.name, self.value) + + def natural_key(self): + return (self.kind, self.name) + + +class Category(models.Model): + name = models.CharField(max_length=20) + meta_data = models.ForeignKey( + CategoryMetaData, models.SET_NULL, null=True, default=None + ) + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + +class Author(models.Model): + name = models.CharField(max_length=20) + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + +class TopicManager(models.Manager): + def get_queryset(self): + return super().get_queryset().select_related("category") + + +class Topic(models.Model): + name = models.CharField(max_length=255) + category = models.ForeignKey(Category, models.CASCADE, null=True) + objects = TopicManager() + + +class Article(models.Model): + author = models.ForeignKey(Author, models.CASCADE) + headline = models.CharField(max_length=50) + pub_date = models.DateTimeField() + categories = models.ManyToManyField(Category) + meta_data = models.ManyToManyField(CategoryMetaData) + topics = models.ManyToManyField(Topic) + + class Meta: + ordering = ("pub_date",) + + def __str__(self): + return self.headline + + +class AuthorProfile(models.Model): + author = models.OneToOneField(Author, models.CASCADE, primary_key=True) + date_of_birth = models.DateField() + + def __str__(self): + return "Profile of %s" % self.author + + +class Actor(models.Model): + name = models.CharField(max_length=20, 
primary_key=True) + + class Meta: + ordering = ("name",) + + def __str__(self): + return self.name + + +class Movie(models.Model): + actor = models.ForeignKey(Actor, models.CASCADE) + title = models.CharField(max_length=50) + price = models.DecimalField(max_digits=6, decimal_places=2, default=Decimal("0.00")) + + class Meta: + ordering = ("title",) + + def __str__(self): + return self.title + + +class Score(models.Model): + score = models.FloatField() + + +class Team: + def __init__(self, title): + self.title = title + + def __str__(self): + raise NotImplementedError("Not so simple") + + def to_string(self): + return str(self.title) + + +class TeamField(models.CharField): + def __init__(self): + super().__init__(max_length=100) + + def get_db_prep_save(self, value, connection): + return str(value.title) + + def to_python(self, value): + if isinstance(value, Team): + return value + return Team(value) + + def from_db_value(self, value, expression, connection): + return Team(value) + + def value_to_string(self, obj): + return self.value_from_object(obj).to_string() + + def deconstruct(self): + name, path, args, kwargs = super().deconstruct() + del kwargs["max_length"] + return name, path, args, kwargs + + +class Player(models.Model): + name = models.CharField(max_length=50) + rank = models.IntegerField() + team = TeamField() + + def __str__(self): + return "%s (%d) playing for %s" % (self.name, self.rank, self.team.to_string()) + + +class BaseModel(models.Model): + parent_data = models.IntegerField() + + +class ProxyBaseModel(BaseModel): + class Meta: + proxy = True + + +class ProxyProxyBaseModel(ProxyBaseModel): + class Meta: + proxy = True + + +class ComplexModel(models.Model): + field1 = models.CharField(max_length=10) + field2 = models.CharField(max_length=10) + field3 = models.CharField(max_length=10) diff --git a/testbed/django__django/tests/serializers/models/data.py b/testbed/django__django/tests/serializers/models/data.py new file mode 100644 index 
"""
******** Models for test_data.py ***********
The following classes are for testing basic data marshalling, including
NULL values, where allowed.
The basic idea is to have a model for each Django data type.
"""
import uuid

from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models

from .base import BaseModel


class BinaryData(models.Model):
    data = models.BinaryField(null=True)


class BooleanData(models.Model):
    data = models.BooleanField(default=False, null=True)


class CharData(models.Model):
    data = models.CharField(max_length=30, null=True)


class DateData(models.Model):
    data = models.DateField(null=True)


class DateTimeData(models.Model):
    data = models.DateTimeField(null=True)


class DecimalData(models.Model):
    data = models.DecimalField(null=True, decimal_places=3, max_digits=5)


class EmailData(models.Model):
    data = models.EmailField(null=True)


class FileData(models.Model):
    data = models.FileField(null=True)


class FilePathData(models.Model):
    data = models.FilePathField(null=True)


class FloatData(models.Model):
    data = models.FloatField(null=True)


class IntegerData(models.Model):
    data = models.IntegerField(null=True)


class BigIntegerData(models.Model):
    data = models.BigIntegerField(null=True)


# class ImageData(models.Model):
#    data = models.ImageField(null=True)


class GenericIPAddressData(models.Model):
    data = models.GenericIPAddressField(null=True)


class PositiveBigIntegerData(models.Model):
    data = models.PositiveBigIntegerField(null=True)


class PositiveIntegerData(models.Model):
    data = models.PositiveIntegerField(null=True)


class PositiveSmallIntegerData(models.Model):
    data = models.PositiveSmallIntegerField(null=True)


class SlugData(models.Model):
    data = models.SlugField(null=True)


class SmallData(models.Model):
    data = models.SmallIntegerField(null=True)


class TextData(models.Model):
    data = models.TextField(null=True)


class TimeData(models.Model):
    data = models.TimeField(null=True)


class Tag(models.Model):
    """A tag on an item."""

    data = models.SlugField()
    content_type = models.ForeignKey(ContentType, models.CASCADE)
    object_id = models.PositiveIntegerField()

    # Generic FK built from content_type + object_id above.
    content_object = GenericForeignKey()

    class Meta:
        ordering = ["data"]


class GenericData(models.Model):
    data = models.CharField(max_length=30)

    tags = GenericRelation(Tag)


# The following test classes are all for validation
# of related objects; in particular, forward, backward,
# and self references.


class Anchor(models.Model):
    """This is a model that can be used as
    something for other models to point at"""

    data = models.CharField(max_length=30)

    class Meta:
        ordering = ("id",)


class UniqueAnchor(models.Model):
    """This is a model that can be used as
    something for other models to point at"""

    data = models.CharField(unique=True, max_length=30)


class FKData(models.Model):
    data = models.ForeignKey(Anchor, models.SET_NULL, null=True)


class M2MData(models.Model):
    data = models.ManyToManyField(Anchor)


class O2OData(models.Model):
    # One to one field can't be null here, since it is a PK.
    data = models.OneToOneField(Anchor, models.CASCADE, primary_key=True)


class FKSelfData(models.Model):
    data = models.ForeignKey("self", models.CASCADE, null=True)


class M2MSelfData(models.Model):
    data = models.ManyToManyField("self", symmetrical=False)


class FKDataToField(models.Model):
    # FK targeting a non-pk unique column via to_field.
    data = models.ForeignKey(UniqueAnchor, models.SET_NULL, null=True, to_field="data")


class FKDataToO2O(models.Model):
    data = models.ForeignKey(O2OData, models.SET_NULL, null=True)


class M2MIntermediateData(models.Model):
    data = models.ManyToManyField(Anchor, through="Intermediate")


class Intermediate(models.Model):
    left = models.ForeignKey(M2MIntermediateData, models.CASCADE)
    right = models.ForeignKey(Anchor, models.CASCADE)
    extra = models.CharField(max_length=30, blank=True, default="doesn't matter")


# The following test classes are for validating the
# deserialization of objects that use a user-defined
# field as the primary key.
# Some of these data types have been commented out
# because they can't be used as a primary key on one
# or all database backends.


class BooleanPKData(models.Model):
    data = models.BooleanField(primary_key=True, default=False)


class CharPKData(models.Model):
    data = models.CharField(max_length=30, primary_key=True)


class DatePKData(models.Model):
    data = models.DateField(primary_key=True)


class DateTimePKData(models.Model):
    data = models.DateTimeField(primary_key=True)


class DecimalPKData(models.Model):
    data = models.DecimalField(primary_key=True, decimal_places=3, max_digits=5)


class EmailPKData(models.Model):
    data = models.EmailField(primary_key=True)


# class FilePKData(models.Model):
#    data = models.FileField(primary_key=True)


class FilePathPKData(models.Model):
    data = models.FilePathField(primary_key=True)


class FloatPKData(models.Model):
    data = models.FloatField(primary_key=True)


class IntegerPKData(models.Model):
    data = models.IntegerField(primary_key=True)


# class ImagePKData(models.Model):
#    data = models.ImageField(primary_key=True)


class GenericIPAddressPKData(models.Model):
    data = models.GenericIPAddressField(primary_key=True)


class PositiveIntegerPKData(models.Model):
    data = models.PositiveIntegerField(primary_key=True)


class PositiveSmallIntegerPKData(models.Model):
    data = models.PositiveSmallIntegerField(primary_key=True)


class SlugPKData(models.Model):
    data = models.SlugField(primary_key=True)


class SmallPKData(models.Model):
    data = models.SmallIntegerField(primary_key=True)


# class TextPKData(models.Model):
#     data = models.TextField(primary_key=True)

# class TimePKData(models.Model):
#     data = models.TimeField(primary_key=True)


class UUIDData(models.Model):
    data = models.UUIDField(primary_key=True)


class UUIDDefaultData(models.Model):
    data = models.UUIDField(primary_key=True, default=uuid.uuid4)


class FKToUUID(models.Model):
    data = models.ForeignKey(UUIDData, models.CASCADE)


# Tests for handling fields with pre_save functions, or
# models with save functions that modify data


class AutoNowDateTimeData(models.Model):
    data = models.DateTimeField(null=True, auto_now=True)


class ModifyingSaveData(models.Model):
    data = models.IntegerField(null=True)

    def save(self, *args, **kwargs):
        """
        A save method that modifies the data in the object.
        A user-defined save() method isn't called when objects are deserialized
        (#4459).
        """
        self.data = 666
        super().save(*args, **kwargs)


# Tests for serialization of models using inheritance.
# Regression for #7202, #7350


class AbstractBaseModel(models.Model):
    parent_data = models.IntegerField()

    class Meta:
        abstract = True


class InheritAbstractModel(AbstractBaseModel):
    child_data = models.IntegerField()


class InheritBaseModel(BaseModel):
    child_data = models.IntegerField()


class ExplicitInheritBaseModel(BaseModel):
    # Explicit parent_link OneToOne instead of the implicit *_ptr field.
    parent = models.OneToOneField(BaseModel, models.CASCADE, parent_link=True)
    child_data = models.IntegerField()


class LengthModel(models.Model):
    data = models.IntegerField()

    def __len__(self):
        # An instance whose truthiness depends on data; serializers must not
        # rely on bool(obj).
        return self.data


# --- tests/serializers/models/multi_table.py ---
# Multi-table inheritance models with natural keys.
from django.db import models


class ParentManager(models.Manager):
    def get_by_natural_key(self, parent_data):
        return self.get(parent_data=parent_data)


class Parent(models.Model):
    parent_data = models.CharField(max_length=30, unique=True)
    parent_m2m = models.ManyToManyField("self")

    objects = ParentManager()

    def natural_key(self):
        return (self.parent_data,)


class Child(Parent):
    # Multi-table inheritance: Child has its own table linked to Parent.
    child_data = models.CharField(max_length=30, unique=True)
"""Models for test_natural.py"""
import uuid

from django.db import models


class NaturalKeyAnchorManager(models.Manager):
    def get_by_natural_key(self, data):
        return self.get(data=data)


class NaturalKeyAnchor(models.Model):
    data = models.CharField(max_length=100, unique=True)
    title = models.CharField(max_length=100, null=True)

    objects = NaturalKeyAnchorManager()

    def natural_key(self):
        return (self.data,)


class FKDataNaturalKey(models.Model):
    data = models.ForeignKey(NaturalKeyAnchor, models.SET_NULL, null=True)


class NaturalKeyThing(models.Model):
    # Self-referential FK and M2M, both addressed by natural key, to test
    # forward-reference handling during natural-key deserialization.
    key = models.CharField(max_length=100, unique=True)
    other_thing = models.ForeignKey(
        "NaturalKeyThing", on_delete=models.CASCADE, null=True
    )
    other_things = models.ManyToManyField(
        "NaturalKeyThing", related_name="thing_m2m_set"
    )

    class Manager(models.Manager):
        def get_by_natural_key(self, key):
            return self.get(key=key)

    objects = Manager()

    def natural_key(self):
        return (self.key,)

    def __str__(self):
        return self.key


class NaturalPKWithDefault(models.Model):
    # UUID pk with a default: natural-key deserialization must not treat the
    # generated default as an existing pk.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    name = models.CharField(max_length=100, unique=True)

    class Manager(models.Manager):
        def get_by_natural_key(self, name):
            return self.get(name=name)

    objects = Manager()

    def natural_key(self):
        return (self.name,)


class FKAsPKNoNaturalKeyManager(models.Manager):
    def get_by_natural_key(self, *args, **kwargs):
        return super().get_by_natural_key(*args, **kwargs)


class FKAsPKNoNaturalKey(models.Model):
    # The pk is an FK to a model without its own usable natural key chain.
    pk_fk = models.ForeignKey(
        NaturalKeyAnchor, on_delete=models.CASCADE, primary_key=True
    )

    objects = FKAsPKNoNaturalKeyManager()

    def natural_key(self):
        # Guard: serialization of this model must never call natural_key().
        raise NotImplementedError("This method was not expected to be called.")


# --- tests/serializers/test_data.py ---
"""
A test spanning all the capabilities of all the serializers.

This class defines sample data and a dynamically generated
test case that is capable of testing the capabilities of
the serializers. This includes all valid data values, plus
forward, backwards and self references.
"""
import datetime
import decimal
import uuid

from django.core import serializers
from django.db import connection, models
from django.test import TestCase

from .models import (
    Anchor,
    AutoNowDateTimeData,
    BigIntegerData,
    BinaryData,
    BooleanData,
    BooleanPKData,
    CharData,
    CharPKData,
    DateData,
    DatePKData,
    DateTimeData,
    DateTimePKData,
    DecimalData,
    DecimalPKData,
    EmailData,
    EmailPKData,
    ExplicitInheritBaseModel,
    FileData,
    FilePathData,
    FilePathPKData,
    FKData,
    FKDataToField,
    FKDataToO2O,
    FKSelfData,
    FKToUUID,
    FloatData,
    FloatPKData,
    GenericData,
    GenericIPAddressData,
    GenericIPAddressPKData,
    InheritAbstractModel,
    InheritBaseModel,
    IntegerData,
    IntegerPKData,
    Intermediate,
    LengthModel,
    M2MData,
    M2MIntermediateData,
    M2MSelfData,
    ModifyingSaveData,
    O2OData,
    PositiveBigIntegerData,
    PositiveIntegerData,
    PositiveIntegerPKData,
    PositiveSmallIntegerData,
    PositiveSmallIntegerPKData,
    SlugData,
    SlugPKData,
    SmallData,
    SmallPKData,
    Tag,
    TextData,
    TimeData,
    UniqueAnchor,
    UUIDData,
    UUIDDefaultData,
)
from .tests import register_tests
# A set of functions that can be used to recreate
# test data objects of various kinds.
# The save method is a raw base model save, to make
# sure that the data in the database matches the
# exact test case.


def data_create(pk, klass, data):
    instance = klass(id=pk)
    instance.data = data
    models.Model.save_base(instance, raw=True)
    return [instance]


def generic_create(pk, klass, data):
    # data[0] is the instance's own value; the rest are generic tags.
    instance = klass(id=pk)
    instance.data = data[0]
    models.Model.save_base(instance, raw=True)
    for tag in data[1:]:
        instance.tags.create(data=tag)
    return [instance]


def fk_create(pk, klass, data):
    instance = klass(id=pk)
    setattr(instance, "data_id", data)
    models.Model.save_base(instance, raw=True)
    return [instance]


def m2m_create(pk, klass, data):
    instance = klass(id=pk)
    models.Model.save_base(instance, raw=True)
    instance.data.set(data)
    return [instance]


def im2m_create(pk, klass, data):
    instance = klass(id=pk)
    models.Model.save_base(instance, raw=True)
    return [instance]


def im_create(pk, klass, data):
    # Create an explicit intermediate (through) row.
    instance = klass(id=pk)
    instance.right_id = data["right"]
    instance.left_id = data["left"]
    if "extra" in data:
        instance.extra = data["extra"]
    models.Model.save_base(instance, raw=True)
    return [instance]


def o2o_create(pk, klass, data):
    # pk is ignored: the O2O field itself is the primary key.
    instance = klass()
    instance.data_id = data
    models.Model.save_base(instance, raw=True)
    return [instance]


def pk_create(pk, klass, data):
    # pk is ignored: the data field itself is the primary key.
    instance = klass()
    instance.data = data
    models.Model.save_base(instance, raw=True)
    return [instance]


def inherited_create(pk, klass, data):
    instance = klass(id=pk, **data)
    # This isn't a raw save because:
    # 1) we're testing inheritance, not field behavior, so none
    #    of the field values need to be protected.
    # 2) saving the child class and having the parent created
    #    automatically is easier than manually creating both.
    models.Model.save(instance)
    created = [instance]
    for klass in instance._meta.parents:
        created.append(klass.objects.get(id=pk))
    return created


# A set of functions that can be used to compare
# test data objects of various kinds


def data_compare(testcase, pk, klass, data):
    instance = klass.objects.get(id=pk)
    if klass == BinaryData and data is not None:
        testcase.assertEqual(
            bytes(data),
            bytes(instance.data),
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"
            % (
                pk,
                repr(bytes(data)),
                type(data),
                repr(bytes(instance.data)),
                type(instance.data),
            ),
        )
    else:
        testcase.assertEqual(
            data,
            instance.data,
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"
            % (
                pk,
                data,
                type(data),
                instance,
                type(instance.data),
            ),
        )


def generic_compare(testcase, pk, klass, data):
    instance = klass.objects.get(id=pk)
    testcase.assertEqual(data[0], instance.data)
    testcase.assertEqual(data[1:], [t.data for t in instance.tags.order_by("id")])


def fk_compare(testcase, pk, klass, data):
    instance = klass.objects.get(id=pk)
    testcase.assertEqual(data, instance.data_id)


def m2m_compare(testcase, pk, klass, data):
    instance = klass.objects.get(id=pk)
    testcase.assertEqual(data, [obj.id for obj in instance.data.order_by("id")])


def im2m_compare(testcase, pk, klass, data):
    klass.objects.get(id=pk)
    # actually nothing else to check, the instance just should exist


def im_compare(testcase, pk, klass, data):
    instance = klass.objects.get(id=pk)
    testcase.assertEqual(data["left"], instance.left_id)
    testcase.assertEqual(data["right"], instance.right_id)
    if "extra" in data:
        testcase.assertEqual(data["extra"], instance.extra)
    else:
        testcase.assertEqual("doesn't matter", instance.extra)


def o2o_compare(testcase, pk, klass, data):
    instance = klass.objects.get(data=data)
    testcase.assertEqual(data, instance.data_id)


def pk_compare(testcase, pk, klass, data):
    instance = klass.objects.get(data=data)
    testcase.assertEqual(data, instance.data)


def inherited_compare(testcase, pk, klass, data):
    instance = klass.objects.get(id=pk)
    for key, value in data.items():
        testcase.assertEqual(value, getattr(instance, key))


# Define some data types. Each data type is
# actually a pair of functions; one to create
# and one to compare objects of that type
data_obj = (data_create, data_compare)
generic_obj = (generic_create, generic_compare)
fk_obj = (fk_create, fk_compare)
m2m_obj = (m2m_create, m2m_compare)
im2m_obj = (im2m_create, im2m_compare)
im_obj = (im_create, im_compare)
o2o_obj = (o2o_create, o2o_compare)
pk_obj = (pk_create, pk_compare)
inherited_obj = (inherited_create, inherited_compare)
uuid_obj = uuid.uuid4()

test_data = [
    # Format: (data type, PK value, Model Class, data)
    (data_obj, 1, BinaryData, memoryview(b"\x05\xFD\x00")),
    (data_obj, 2, BinaryData, None),
    (data_obj, 5, BooleanData, True),
    (data_obj, 6, BooleanData, False),
    (data_obj, 7, BooleanData, None),
    (data_obj, 10, CharData, "Test Char Data"),
    (data_obj, 11, CharData, ""),
    (data_obj, 12, CharData, "None"),
    (data_obj, 13, CharData, "null"),
    (data_obj, 14, CharData, "NULL"),
    (data_obj, 15, CharData, None),
    # (We use something that will fit into a latin1 database encoding here,
    # because that is still the default used on many system setups.)
    (data_obj, 16, CharData, "\xa5"),
    (data_obj, 20, DateData, datetime.date(2006, 6, 16)),
    (data_obj, 21, DateData, None),
    (data_obj, 30, DateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (data_obj, 31, DateTimeData, None),
    (data_obj, 40, EmailData, "hovercraft@example.com"),
    (data_obj, 41, EmailData, None),
    (data_obj, 42, EmailData, ""),
    (data_obj, 50, FileData, "file:///foo/bar/whiz.txt"),
    # (data_obj, 51, FileData, None),
    (data_obj, 52, FileData, ""),
    (data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
    (data_obj, 61, FilePathData, None),
    (data_obj, 62, FilePathData, ""),
    (data_obj, 70, DecimalData, decimal.Decimal("12.345")),
    (data_obj, 71, DecimalData, decimal.Decimal("-12.345")),
    (data_obj, 72, DecimalData, decimal.Decimal("0.0")),
    (data_obj, 73, DecimalData, None),
    (data_obj, 74, FloatData, 12.345),
    (data_obj, 75, FloatData, -12.345),
    (data_obj, 76, FloatData, 0.0),
    (data_obj, 77, FloatData, None),
    (data_obj, 80, IntegerData, 123456789),
    (data_obj, 81, IntegerData, -123456789),
    (data_obj, 82, IntegerData, 0),
    (data_obj, 83, IntegerData, None),
    # (XX, ImageData
    (data_obj, 95, GenericIPAddressData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
    (data_obj, 96, GenericIPAddressData, None),
    (data_obj, 110, PositiveBigIntegerData, 9223372036854775807),
    (data_obj, 111, PositiveBigIntegerData, None),
    (data_obj, 120, PositiveIntegerData, 123456789),
    (data_obj, 121, PositiveIntegerData, None),
    (data_obj, 130, PositiveSmallIntegerData, 12),
    (data_obj, 131, PositiveSmallIntegerData, None),
    (data_obj, 140, SlugData, "this-is-a-slug"),
    (data_obj, 141, SlugData, None),
    (data_obj, 142, SlugData, ""),
    (data_obj, 150, SmallData, 12),
    (data_obj, 151, SmallData, -12),
    (data_obj, 152, SmallData, 0),
    (data_obj, 153, SmallData, None),
    (
        data_obj,
        160,
        TextData,
        """This is a long piece of text.
It contains line breaks.
Several of them.
The end.""",
    ),
    (data_obj, 161, TextData, ""),
    (data_obj, 162, TextData, None),
    (data_obj, 170, TimeData, datetime.time(10, 42, 37)),
    (data_obj, 171, TimeData, None),
    (generic_obj, 200, GenericData, ["Generic Object 1", "tag1", "tag2"]),
    (generic_obj, 201, GenericData, ["Generic Object 2", "tag2", "tag3"]),
    (data_obj, 300, Anchor, "Anchor 1"),
    (data_obj, 301, Anchor, "Anchor 2"),
    (data_obj, 302, UniqueAnchor, "UAnchor 1"),
    (fk_obj, 400, FKData, 300),  # Post reference
    (fk_obj, 401, FKData, 500),  # Pre reference
    (fk_obj, 402, FKData, None),  # Empty reference
    (m2m_obj, 410, M2MData, []),  # Empty set
    (m2m_obj, 411, M2MData, [300, 301]),  # Post reference
    (m2m_obj, 412, M2MData, [500, 501]),  # Pre reference
    (m2m_obj, 413, M2MData, [300, 301, 500, 501]),  # Pre and Post reference
    (o2o_obj, None, O2OData, 300),  # Post reference
    (o2o_obj, None, O2OData, 500),  # Pre reference
    (fk_obj, 430, FKSelfData, 431),  # Pre reference
    (fk_obj, 431, FKSelfData, 430),  # Post reference
    (fk_obj, 432, FKSelfData, None),  # Empty reference
    (m2m_obj, 440, M2MSelfData, []),
    (m2m_obj, 441, M2MSelfData, []),
    (m2m_obj, 442, M2MSelfData, [440, 441]),
    (m2m_obj, 443, M2MSelfData, [445, 446]),
    (m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
    (m2m_obj, 445, M2MSelfData, []),
    (m2m_obj, 446, M2MSelfData, []),
    (fk_obj, 450, FKDataToField, "UAnchor 1"),
    (fk_obj, 451, FKDataToField, "UAnchor 2"),
    (fk_obj, 452, FKDataToField, None),
    (fk_obj, 460, FKDataToO2O, 300),
    (im2m_obj, 470, M2MIntermediateData, None),
    # testing post- and pre-references and extra fields
    (im_obj, 480, Intermediate, {"right": 300, "left": 470}),
    (im_obj, 481, Intermediate, {"right": 300, "left": 490}),
    (im_obj, 482, Intermediate, {"right": 500, "left": 470}),
    (im_obj, 483, Intermediate, {"right": 500, "left": 490}),
    (im_obj, 484, Intermediate, {"right": 300, "left": 470, "extra": "extra"}),
    (im_obj, 485, Intermediate, {"right": 300, "left": 490, "extra": "extra"}),
    (im_obj, 486, Intermediate, {"right": 500, "left": 470, "extra": "extra"}),
    (im_obj, 487, Intermediate, {"right": 500, "left": 490, "extra": "extra"}),
    (im2m_obj, 490, M2MIntermediateData, []),
    (data_obj, 500, Anchor, "Anchor 3"),
    (data_obj, 501, Anchor, "Anchor 4"),
    (data_obj, 502, UniqueAnchor, "UAnchor 2"),
    (pk_obj, 601, BooleanPKData, True),
    (pk_obj, 602, BooleanPKData, False),
    (pk_obj, 610, CharPKData, "Test Char PKData"),
    (pk_obj, 620, DatePKData, datetime.date(2006, 6, 16)),
    (pk_obj, 630, DateTimePKData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (pk_obj, 640, EmailPKData, "hovercraft@example.com"),
    # (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'),
    (pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
    (pk_obj, 670, DecimalPKData, decimal.Decimal("12.345")),
    (pk_obj, 671, DecimalPKData, decimal.Decimal("-12.345")),
    (pk_obj, 672, DecimalPKData, decimal.Decimal("0.0")),
    (pk_obj, 673, FloatPKData, 12.345),
    (pk_obj, 674, FloatPKData, -12.345),
    (pk_obj, 675, FloatPKData, 0.0),
    (pk_obj, 680, IntegerPKData, 123456789),
    (pk_obj, 681, IntegerPKData, -123456789),
    (pk_obj, 682, IntegerPKData, 0),
    # (XX, ImagePKData
    (pk_obj, 695, GenericIPAddressPKData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
    (pk_obj, 720, PositiveIntegerPKData, 123456789),
    (pk_obj, 730, PositiveSmallIntegerPKData, 12),
    (pk_obj, 740, SlugPKData, "this-is-a-slug"),
    (pk_obj, 750, SmallPKData, 12),
    (pk_obj, 751, SmallPKData, -12),
    (pk_obj, 752, SmallPKData, 0),
    # (pk_obj, 760, TextPKData, """This is a long piece of text.
    # It contains line breaks.
    # Several of them.
    # The end."""),
    # (pk_obj, 770, TimePKData, datetime.time(10, 42, 37)),
    # (pk_obj, 790, XMLPKData, ""),
    (pk_obj, 791, UUIDData, uuid_obj),
    (fk_obj, 792, FKToUUID, uuid_obj),
    (pk_obj, 793, UUIDDefaultData, uuid_obj),
    (data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (data_obj, 810, ModifyingSaveData, 42),
    (inherited_obj, 900, InheritAbstractModel, {"child_data": 37, "parent_data": 42}),
    (
        inherited_obj,
        910,
        ExplicitInheritBaseModel,
        {"child_data": 37, "parent_data": 42},
    ),
    (inherited_obj, 920, InheritBaseModel, {"child_data": 37, "parent_data": 42}),
    (data_obj, 1000, BigIntegerData, 9223372036854775807),
    (data_obj, 1001, BigIntegerData, -9223372036854775808),
    (data_obj, 1002, BigIntegerData, 0),
    (data_obj, 1003, BigIntegerData, None),
    (data_obj, 1004, LengthModel, 0),
    (data_obj, 1005, LengthModel, 1),
]


# Because Oracle treats the empty string as NULL, Oracle is expected to fail
# when field.empty_strings_allowed is True and the value is None; skip these
# tests.
if connection.features.interprets_empty_strings_as_nulls:
    test_data = [
        data
        for data in test_data
        if not (
            data[0] == data_obj
            and data[2]._meta.get_field("data").empty_strings_allowed
            and data[3] is None
        )
    ]


class SerializerDataTests(TestCase):
    # Test methods are attached dynamically by register_tests() below,
    # one per registered serializer format.
    pass


def serializerTest(self, format):
    """
    Round-trip every entry in test_data through the given serializer format
    and verify both field values and per-class object counts.
    """
    # FK to an object with PK of 0. This won't work on MySQL without the
    # NO_AUTO_VALUE_ON_ZERO SQL mode since it won't let you create an object
    # with an autoincrement primary key of 0.
    # Work on a per-run copy: this test runs once per registered format, and
    # extending the module-level list in place would append the pk-0 entries
    # again on every invocation.
    data = list(test_data)
    if connection.features.allows_auto_pk_0:
        data.extend(
            [
                (data_obj, 0, Anchor, "Anchor 0"),
                (fk_obj, 465, FKData, 0),
            ]
        )

    # Create all the objects defined in the test data
    objects = []
    instance_count = {}
    for func, pk, klass, datum in data:
        with connection.constraint_checks_disabled():
            objects.extend(func[0](pk, klass, datum))

    # Get a count of the number of objects created for each class.
    # (Seed the mapping from the test data first; previously the loop
    # iterated the still-empty dict, so nothing was ever recorded and the
    # final per-class count assertions were vacuous.)
    for _, _, klass, _ in data:
        instance_count.setdefault(klass, 0)
    for klass in instance_count:
        instance_count[klass] = klass.objects.count()

    # Add the generic tagged objects to the object list
    objects.extend(Tag.objects.all())

    # Serialize the test database
    serialized_data = serializers.serialize(format, objects, indent=2)

    for obj in serializers.deserialize(format, serialized_data):
        obj.save()

    # Assert that the deserialized data is the same
    # as the original source
    for func, pk, klass, datum in data:
        func[1](self, pk, klass, datum)

    # Assert that the number of objects deserialized is the
    # same as the number that was serialized.
    for klass, count in instance_count.items():
        self.assertEqual(count, klass.objects.count())


register_tests(SerializerDataTests, "test_%s_serializer", serializerTest)


# --- tests/serializers/test_deserializedobject.py ---
from django.core.serializers.base import DeserializedObject
from django.test import SimpleTestCase

from .models import Author


class TestDeserializedObjectTests(SimpleTestCase):
    def test_repr(self):
        author = Author(name="John", pk=1)
        deserial_obj = DeserializedObject(obj=author)
        # DeserializedObject.__repr__ renders "<DeserializedObject:
        # app_label.ModelName(pk=…)>"; the expected literal had been stripped
        # from this assertion (it resembles an HTML tag), leaving "".
        self.assertEqual(
            repr(deserial_obj), "<DeserializedObject: serializers.Author(pk=1)>"
        )

# --- tests/serializers/test_json.py continues in the next chunk ---
"Non-fiction"} + }]""" + mapping_ordering_str = """[ +{ + "model": "serializers.article", + "pk": %(article_pk)s, + "fields": { + "author": %(author_pk)s, + "headline": "Poker has no place on ESPN", + "pub_date": "2006-06-16T11:00:00", + "categories": [ + %(first_category_pk)s, + %(second_category_pk)s + ], + "meta_data": [], + "topics": [] + } +} +] +""" + + @staticmethod + def _validate_output(serial_str): + try: + json.loads(serial_str) + except Exception: + return False + else: + return True + + @staticmethod + def _get_pk_values(serial_str): + serial_list = json.loads(serial_str) + return [obj_dict["pk"] for obj_dict in serial_list] + + @staticmethod + def _get_field_values(serial_str, field_name): + serial_list = json.loads(serial_str) + return [ + obj_dict["fields"][field_name] + for obj_dict in serial_list + if field_name in obj_dict["fields"] + ] + + def test_indentation_whitespace(self): + s = serializers.json.Serializer() + json_data = s.serialize([Score(score=5.0), Score(score=6.0)], indent=2) + for line in json_data.splitlines(): + if re.search(r".+,\s*$", line): + self.assertEqual(line, line.rstrip()) + + @isolate_apps("serializers") + def test_custom_encoder(self): + class ScoreDecimal(models.Model): + score = models.DecimalField() + + class CustomJSONEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, decimal.Decimal): + return str(o) + return super().default(o) + + s = serializers.json.Serializer() + json_data = s.serialize( + [ScoreDecimal(score=decimal.Decimal(1.0))], cls=CustomJSONEncoder + ) + self.assertIn('"fields": {"score": "1"}', json_data) + + def test_json_deserializer_exception(self): + with self.assertRaises(DeserializationError): + for obj in serializers.deserialize("json", """[{"pk":1}"""): + pass + + def test_helpful_error_message_invalid_pk(self): + """ + If there is an invalid primary key, the error message should contain + the model associated with it. 
+ """ + test_string = """[{ + "pk": "badpk", + "model": "serializers.player", + "fields": { + "name": "Bob", + "rank": 1, + "team": "Team" + } + }]""" + with self.assertRaisesMessage( + DeserializationError, "(serializers.player:pk=badpk)" + ): + list(serializers.deserialize("json", test_string)) + + def test_helpful_error_message_invalid_field(self): + """ + If there is an invalid field value, the error message should contain + the model associated with it. + """ + test_string = """[{ + "pk": "1", + "model": "serializers.player", + "fields": { + "name": "Bob", + "rank": "invalidint", + "team": "Team" + } + }]""" + expected = "(serializers.player:pk=1) field_value was 'invalidint'" + with self.assertRaisesMessage(DeserializationError, expected): + list(serializers.deserialize("json", test_string)) + + def test_helpful_error_message_for_foreign_keys(self): + """ + Invalid foreign keys with a natural key should throw a helpful error + message, such as what the failing key is. + """ + test_string = """[{ + "pk": 1, + "model": "serializers.category", + "fields": { + "name": "Unknown foreign key", + "meta_data": [ + "doesnotexist", + "metadata" + ] + } + }]""" + key = ["doesnotexist", "metadata"] + expected = "(serializers.category:pk=1) field_value was '%r'" % key + with self.assertRaisesMessage(DeserializationError, expected): + list(serializers.deserialize("json", test_string)) + + def test_helpful_error_message_for_many2many_non_natural(self): + """ + Invalid many-to-many keys should throw a helpful error message. 
+ """ + test_string = """[{ + "pk": 1, + "model": "serializers.article", + "fields": { + "author": 1, + "headline": "Unknown many to many", + "pub_date": "2014-09-15T10:35:00", + "categories": [1, "doesnotexist"] + } + }, { + "pk": 1, + "model": "serializers.author", + "fields": { + "name": "Agnes" + } + }, { + "pk": 1, + "model": "serializers.category", + "fields": { + "name": "Reference" + } + }]""" + expected = "(serializers.article:pk=1) field_value was 'doesnotexist'" + with self.assertRaisesMessage(DeserializationError, expected): + list(serializers.deserialize("json", test_string)) + + def test_helpful_error_message_for_many2many_natural1(self): + """ + Invalid many-to-many keys should throw a helpful error message. + This tests the code path where one of a list of natural keys is invalid. + """ + test_string = """[{ + "pk": 1, + "model": "serializers.categorymetadata", + "fields": { + "kind": "author", + "name": "meta1", + "value": "Agnes" + } + }, { + "pk": 1, + "model": "serializers.article", + "fields": { + "author": 1, + "headline": "Unknown many to many", + "pub_date": "2014-09-15T10:35:00", + "meta_data": [ + ["author", "meta1"], + ["doesnotexist", "meta1"], + ["author", "meta1"] + ] + } + }, { + "pk": 1, + "model": "serializers.author", + "fields": { + "name": "Agnes" + } + }]""" + key = ["doesnotexist", "meta1"] + expected = "(serializers.article:pk=1) field_value was '%r'" % key + with self.assertRaisesMessage(DeserializationError, expected): + for obj in serializers.deserialize("json", test_string): + obj.save() + + def test_helpful_error_message_for_many2many_natural2(self): + """ + Invalid many-to-many keys should throw a helpful error message. This + tests the code path where a natural many-to-many key has only a single + value. 
+ """ + test_string = """[{ + "pk": 1, + "model": "serializers.article", + "fields": { + "author": 1, + "headline": "Unknown many to many", + "pub_date": "2014-09-15T10:35:00", + "meta_data": [1, "doesnotexist"] + } + }, { + "pk": 1, + "model": "serializers.categorymetadata", + "fields": { + "kind": "author", + "name": "meta1", + "value": "Agnes" + } + }, { + "pk": 1, + "model": "serializers.author", + "fields": { + "name": "Agnes" + } + }]""" + expected = "(serializers.article:pk=1) field_value was 'doesnotexist'" + with self.assertRaisesMessage(DeserializationError, expected): + for obj in serializers.deserialize("json", test_string, ignore=False): + obj.save() + + def test_helpful_error_message_for_many2many_not_iterable(self): + """ + Not iterable many-to-many field value throws a helpful error message. + """ + test_string = """[{ + "pk": 1, + "model": "serializers.m2mdata", + "fields": {"data": null} + }]""" + + expected = "(serializers.m2mdata:pk=1) field_value was 'None'" + with self.assertRaisesMessage(DeserializationError, expected): + next(serializers.deserialize("json", test_string, ignore=False)) + + +class JsonSerializerTransactionTestCase( + SerializersTransactionTestBase, TransactionTestCase +): + serializer_name = "json" + fwd_ref_str = """[ + { + "pk": 1, + "model": "serializers.article", + "fields": { + "headline": "Forward references pose no problem", + "pub_date": "2006-06-16T15:00:00", + "categories": [1], + "author": 1 + } + }, + { + "pk": 1, + "model": "serializers.category", + "fields": { + "name": "Reference" + } + }, + { + "pk": 1, + "model": "serializers.author", + "fields": { + "name": "Agnes" + } + }]""" + + +class DjangoJSONEncoderTests(SimpleTestCase): + def test_lazy_string_encoding(self): + self.assertEqual( + json.dumps({"lang": gettext_lazy("French")}, cls=DjangoJSONEncoder), + '{"lang": "French"}', + ) + with override("fr"): + self.assertEqual( + json.dumps({"lang": gettext_lazy("French")}, cls=DjangoJSONEncoder), + '{"lang": 
"Fran\\u00e7ais"}', + ) + + def test_timedelta(self): + duration = datetime.timedelta(days=1, hours=2, seconds=3) + self.assertEqual( + json.dumps({"duration": duration}, cls=DjangoJSONEncoder), + '{"duration": "P1DT02H00M03S"}', + ) + duration = datetime.timedelta(0) + self.assertEqual( + json.dumps({"duration": duration}, cls=DjangoJSONEncoder), + '{"duration": "P0DT00H00M00S"}', + ) diff --git a/testbed/django__django/tests/serializers/test_jsonl.py b/testbed/django__django/tests/serializers/test_jsonl.py new file mode 100644 index 0000000000000000000000000000000000000000..3137b037a982fe9aa63ea28f55d5acad2a72e3fd --- /dev/null +++ b/testbed/django__django/tests/serializers/test_jsonl.py @@ -0,0 +1,271 @@ +import decimal +import json +import re + +from django.core import serializers +from django.core.serializers.base import DeserializationError +from django.db import models +from django.test import TestCase, TransactionTestCase +from django.test.utils import isolate_apps + +from .models import Score +from .tests import SerializersTestBase, SerializersTransactionTestBase + + +class JsonlSerializerTestCase(SerializersTestBase, TestCase): + serializer_name = "jsonl" + pkless_str = [ + '{"pk": null,"model": "serializers.category","fields": {"name": "Reference"}}', + '{"model": "serializers.category","fields": {"name": "Non-fiction"}}', + ] + pkless_str = "\n".join([s.replace("\n", "") for s in pkless_str]) + + mapping_ordering_str = ( + '{"model": "serializers.article","pk": %(article_pk)s,' + '"fields": {' + '"author": %(author_pk)s,' + '"headline": "Poker has no place on ESPN",' + '"pub_date": "2006-06-16T11:00:00",' + '"categories": [%(first_category_pk)s,%(second_category_pk)s],' + '"meta_data": [],' + '"topics": []}}\n' + ) + + @staticmethod + def _validate_output(serial_str): + try: + for line in serial_str.split("\n"): + if line: + json.loads(line) + except Exception: + return False + else: + return True + + @staticmethod + def _get_pk_values(serial_str): + 
serial_list = [json.loads(line) for line in serial_str.split("\n") if line] + return [obj_dict["pk"] for obj_dict in serial_list] + + @staticmethod + def _get_field_values(serial_str, field_name): + serial_list = [json.loads(line) for line in serial_str.split("\n") if line] + return [ + obj_dict["fields"][field_name] + for obj_dict in serial_list + if field_name in obj_dict["fields"] + ] + + def test_no_indentation(self): + s = serializers.jsonl.Serializer() + json_data = s.serialize([Score(score=5.0), Score(score=6.0)], indent=2) + for line in json_data.splitlines(): + self.assertIsNone(re.search(r".+,\s*$", line)) + + @isolate_apps("serializers") + def test_custom_encoder(self): + class ScoreDecimal(models.Model): + score = models.DecimalField() + + class CustomJSONEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, decimal.Decimal): + return str(o) + return super().default(o) + + s = serializers.jsonl.Serializer() + json_data = s.serialize( + [ScoreDecimal(score=decimal.Decimal(1.0))], + cls=CustomJSONEncoder, + ) + self.assertIn('"fields": {"score": "1"}', json_data) + + def test_json_deserializer_exception(self): + with self.assertRaises(DeserializationError): + for obj in serializers.deserialize("jsonl", """[{"pk":1}"""): + pass + + def test_helpful_error_message_invalid_pk(self): + """ + If there is an invalid primary key, the error message contains the + model associated with it. + """ + test_string = ( + '{"pk": "badpk","model": "serializers.player",' + '"fields": {"name": "Bob","rank": 1,"team": "Team"}}' + ) + with self.assertRaisesMessage( + DeserializationError, "(serializers.player:pk=badpk)" + ): + list(serializers.deserialize("jsonl", test_string)) + + def test_helpful_error_message_invalid_field(self): + """ + If there is an invalid field value, the error message contains the + model associated with it. 
+ """ + test_string = ( + '{"pk": "1","model": "serializers.player",' + '"fields": {"name": "Bob","rank": "invalidint","team": "Team"}}' + ) + expected = "(serializers.player:pk=1) field_value was 'invalidint'" + with self.assertRaisesMessage(DeserializationError, expected): + list(serializers.deserialize("jsonl", test_string)) + + def test_helpful_error_message_for_foreign_keys(self): + """ + Invalid foreign keys with a natural key throws a helpful error message, + such as what the failing key is. + """ + test_string = ( + '{"pk": 1, "model": "serializers.category",' + '"fields": {' + '"name": "Unknown foreign key",' + '"meta_data": ["doesnotexist","metadata"]}}' + ) + key = ["doesnotexist", "metadata"] + expected = "(serializers.category:pk=1) field_value was '%r'" % key + with self.assertRaisesMessage(DeserializationError, expected): + list(serializers.deserialize("jsonl", test_string)) + + def test_helpful_error_message_for_many2many_non_natural(self): + """ + Invalid many-to-many keys throws a helpful error message. + """ + test_strings = [ + """{ + "pk": 1, + "model": "serializers.article", + "fields": { + "author": 1, + "headline": "Unknown many to many", + "pub_date": "2014-09-15T10:35:00", + "categories": [1, "doesnotexist"] + } + }""", + """{ + "pk": 1, + "model": "serializers.author", + "fields": {"name": "Agnes"} + }""", + """{ + "pk": 1, + "model": "serializers.category", + "fields": {"name": "Reference"} + }""", + ] + test_string = "\n".join([s.replace("\n", "") for s in test_strings]) + expected = "(serializers.article:pk=1) field_value was 'doesnotexist'" + with self.assertRaisesMessage(DeserializationError, expected): + list(serializers.deserialize("jsonl", test_string)) + + def test_helpful_error_message_for_many2many_natural1(self): + """ + Invalid many-to-many keys throws a helpful error message where one of a + list of natural keys is invalid. 
+ """ + test_strings = [ + """{ + "pk": 1, + "model": "serializers.categorymetadata", + "fields": {"kind": "author","name": "meta1","value": "Agnes"} + }""", + """{ + "pk": 1, + "model": "serializers.article", + "fields": { + "author": 1, + "headline": "Unknown many to many", + "pub_date": "2014-09-15T10:35:00", + "meta_data": [ + ["author", "meta1"], + ["doesnotexist", "meta1"], + ["author", "meta1"] + ] + } + }""", + """{ + "pk": 1, + "model": "serializers.author", + "fields": {"name": "Agnes"} + }""", + ] + test_string = "\n".join([s.replace("\n", "") for s in test_strings]) + key = ["doesnotexist", "meta1"] + expected = "(serializers.article:pk=1) field_value was '%r'" % key + with self.assertRaisesMessage(DeserializationError, expected): + for obj in serializers.deserialize("jsonl", test_string): + obj.save() + + def test_helpful_error_message_for_many2many_natural2(self): + """ + Invalid many-to-many keys throws a helpful error message where a + natural many-to-many key has only a single value. + """ + test_strings = [ + """{ + "pk": 1, + "model": "serializers.article", + "fields": { + "author": 1, + "headline": "Unknown many to many", + "pub_date": "2014-09-15T10:35:00", + "meta_data": [1, "doesnotexist"] + } + }""", + """{ + "pk": 1, + "model": "serializers.categorymetadata", + "fields": {"kind": "author","name": "meta1","value": "Agnes"} + }""", + """{ + "pk": 1, + "model": "serializers.author", + "fields": {"name": "Agnes"} + }""", + ] + test_string = "\n".join([s.replace("\n", "") for s in test_strings]) + expected = "(serializers.article:pk=1) field_value was 'doesnotexist'" + with self.assertRaisesMessage(DeserializationError, expected): + for obj in serializers.deserialize("jsonl", test_string, ignore=False): + obj.save() + + def test_helpful_error_message_for_many2many_not_iterable(self): + """ + Not iterable many-to-many field value throws a helpful error message. 
+ """ + test_string = ( + '{"pk": 1,"model": "serializers.m2mdata","fields": {"data": null}}' + ) + expected = "(serializers.m2mdata:pk=1) field_value was 'None'" + with self.assertRaisesMessage(DeserializationError, expected): + next(serializers.deserialize("jsonl", test_string, ignore=False)) + + +class JsonSerializerTransactionTestCase( + SerializersTransactionTestBase, TransactionTestCase +): + serializer_name = "jsonl" + fwd_ref_str = [ + """{ + "pk": 1, + "model": "serializers.article", + "fields": { + "headline": "Forward references pose no problem", + "pub_date": "2006-06-16T15:00:00", + "categories": [1], + "author": 1 + } + }""", + """{ + "pk": 1, + "model": "serializers.category", + "fields": {"name": "Reference"} + }""", + """{ + "pk": 1, + "model": "serializers.author", + "fields": {"name": "Agnes"} + }""", + ] + fwd_ref_str = "\n".join([s.replace("\n", "") for s in fwd_ref_str]) diff --git a/testbed/django__django/tests/serializers/test_natural.py b/testbed/django__django/tests/serializers/test_natural.py new file mode 100644 index 0000000000000000000000000000000000000000..b5b35708c66c70049e01d276862e3ba26d0bfba3 --- /dev/null +++ b/testbed/django__django/tests/serializers/test_natural.py @@ -0,0 +1,286 @@ +from django.core import serializers +from django.db import connection +from django.test import TestCase + +from .models import ( + Child, + FKAsPKNoNaturalKey, + FKDataNaturalKey, + NaturalKeyAnchor, + NaturalKeyThing, + NaturalPKWithDefault, +) +from .tests import register_tests + + +class NaturalKeySerializerTests(TestCase): + pass + + +def natural_key_serializer_test(self, format): + # Create all the objects defined in the test data + with connection.constraint_checks_disabled(): + objects = [ + NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"), + FKDataNaturalKey.objects.create(id=1101, data_id=1100), + FKDataNaturalKey.objects.create(id=1102, data_id=None), + ] + # Serialize the test database + serialized_data = 
serializers.serialize( + format, objects, indent=2, use_natural_foreign_keys=True + ) + + for obj in serializers.deserialize(format, serialized_data): + obj.save() + + # Assert that the deserialized data is the same + # as the original source + for obj in objects: + instance = obj.__class__.objects.get(id=obj.pk) + self.assertEqual( + obj.data, + instance.data, + "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" + % ( + obj.pk, + obj.data, + type(obj.data), + instance, + type(instance.data), + ), + ) + + +def natural_key_test(self, format): + book1 = { + "data": "978-1590597255", + "title": "The Definitive Guide to Django: Web Development Done Right", + } + book2 = {"data": "978-1590599969", "title": "Practical Django Projects"} + + # Create the books. + adrian = NaturalKeyAnchor.objects.create(**book1) + james = NaturalKeyAnchor.objects.create(**book2) + + # Serialize the books. + string_data = serializers.serialize( + format, + NaturalKeyAnchor.objects.all(), + indent=2, + use_natural_foreign_keys=True, + use_natural_primary_keys=True, + ) + + # Delete one book (to prove that the natural key generation will only + # restore the primary keys of books found in the database via the + # get_natural_key manager method). + james.delete() + + # Deserialize and test. + books = list(serializers.deserialize(format, string_data)) + self.assertCountEqual( + [(book.object.title, book.object.pk) for book in books], + [ + (book1["title"], adrian.pk), + (book2["title"], None), + ], + ) + + +def natural_pk_mti_test(self, format): + """ + If serializing objects in a multi-table inheritance relationship using + natural primary keys, the natural foreign key for the parent is output in + the fields of the child so it's possible to relate the child to the parent + when deserializing. 
+ """ + child_1 = Child.objects.create(parent_data="1", child_data="1") + child_2 = Child.objects.create(parent_data="2", child_data="2") + string_data = serializers.serialize( + format, + [child_1.parent_ptr, child_2.parent_ptr, child_2, child_1], + use_natural_foreign_keys=True, + use_natural_primary_keys=True, + ) + child_1.delete() + child_2.delete() + for obj in serializers.deserialize(format, string_data): + obj.save() + children = Child.objects.all() + self.assertEqual(len(children), 2) + for child in children: + # If it's possible to find the superclass from the subclass and it's + # the correct superclass, it's working. + self.assertEqual(child.child_data, child.parent_data) + + +def forward_ref_fk_test(self, format): + t1 = NaturalKeyThing.objects.create(key="t1") + t2 = NaturalKeyThing.objects.create(key="t2", other_thing=t1) + t1.other_thing = t2 + t1.save() + string_data = serializers.serialize( + format, + [t1, t2], + use_natural_primary_keys=True, + use_natural_foreign_keys=True, + ) + NaturalKeyThing.objects.all().delete() + objs_with_deferred_fields = [] + for obj in serializers.deserialize( + format, string_data, handle_forward_references=True + ): + obj.save() + if obj.deferred_fields: + objs_with_deferred_fields.append(obj) + for obj in objs_with_deferred_fields: + obj.save_deferred_fields() + t1 = NaturalKeyThing.objects.get(key="t1") + t2 = NaturalKeyThing.objects.get(key="t2") + self.assertEqual(t1.other_thing, t2) + self.assertEqual(t2.other_thing, t1) + + +def forward_ref_fk_with_error_test(self, format): + t1 = NaturalKeyThing.objects.create(key="t1") + t2 = NaturalKeyThing.objects.create(key="t2", other_thing=t1) + t1.other_thing = t2 + t1.save() + string_data = serializers.serialize( + format, + [t1], + use_natural_primary_keys=True, + use_natural_foreign_keys=True, + ) + NaturalKeyThing.objects.all().delete() + objs_with_deferred_fields = [] + for obj in serializers.deserialize( + format, string_data, handle_forward_references=True + ): 
+ obj.save() + if obj.deferred_fields: + objs_with_deferred_fields.append(obj) + obj = objs_with_deferred_fields[0] + msg = "NaturalKeyThing matching query does not exist" + with self.assertRaisesMessage(serializers.base.DeserializationError, msg): + obj.save_deferred_fields() + + +def forward_ref_m2m_test(self, format): + t1 = NaturalKeyThing.objects.create(key="t1") + t2 = NaturalKeyThing.objects.create(key="t2") + t3 = NaturalKeyThing.objects.create(key="t3") + t1.other_things.set([t2, t3]) + string_data = serializers.serialize( + format, + [t1, t2, t3], + use_natural_primary_keys=True, + use_natural_foreign_keys=True, + ) + NaturalKeyThing.objects.all().delete() + objs_with_deferred_fields = [] + for obj in serializers.deserialize( + format, string_data, handle_forward_references=True + ): + obj.save() + if obj.deferred_fields: + objs_with_deferred_fields.append(obj) + for obj in objs_with_deferred_fields: + obj.save_deferred_fields() + t1 = NaturalKeyThing.objects.get(key="t1") + t2 = NaturalKeyThing.objects.get(key="t2") + t3 = NaturalKeyThing.objects.get(key="t3") + self.assertCountEqual(t1.other_things.all(), [t2, t3]) + + +def forward_ref_m2m_with_error_test(self, format): + t1 = NaturalKeyThing.objects.create(key="t1") + t2 = NaturalKeyThing.objects.create(key="t2") + t3 = NaturalKeyThing.objects.create(key="t3") + t1.other_things.set([t2, t3]) + t1.save() + string_data = serializers.serialize( + format, + [t1, t2], + use_natural_primary_keys=True, + use_natural_foreign_keys=True, + ) + NaturalKeyThing.objects.all().delete() + objs_with_deferred_fields = [] + for obj in serializers.deserialize( + format, string_data, handle_forward_references=True + ): + obj.save() + if obj.deferred_fields: + objs_with_deferred_fields.append(obj) + obj = objs_with_deferred_fields[0] + msg = "NaturalKeyThing matching query does not exist" + with self.assertRaisesMessage(serializers.base.DeserializationError, msg): + obj.save_deferred_fields() + + +def pk_with_default(self, 
format): + """ + The deserializer works with natural keys when the primary key has a default + value. + """ + obj = NaturalPKWithDefault.objects.create(name="name") + string_data = serializers.serialize( + format, + NaturalPKWithDefault.objects.all(), + use_natural_foreign_keys=True, + use_natural_primary_keys=True, + ) + objs = list(serializers.deserialize(format, string_data)) + self.assertEqual(len(objs), 1) + self.assertEqual(objs[0].object.pk, obj.pk) + + +def fk_as_pk_natural_key_not_called(self, format): + """ + The deserializer doesn't rely on natural keys when a model has a custom + primary key that is a ForeignKey. + """ + o1 = NaturalKeyAnchor.objects.create(data="978-1590599969") + o2 = FKAsPKNoNaturalKey.objects.create(pk_fk=o1) + serialized_data = serializers.serialize(format, [o1, o2]) + deserialized_objects = list(serializers.deserialize(format, serialized_data)) + self.assertEqual(len(deserialized_objects), 2) + for obj in deserialized_objects: + self.assertEqual(obj.object.pk, o1.pk) + + +# Dynamically register tests for each serializer +register_tests( + NaturalKeySerializerTests, + "test_%s_natural_key_serializer", + natural_key_serializer_test, +) +register_tests( + NaturalKeySerializerTests, "test_%s_serializer_natural_keys", natural_key_test +) +register_tests( + NaturalKeySerializerTests, "test_%s_serializer_natural_pks_mti", natural_pk_mti_test +) +register_tests( + NaturalKeySerializerTests, "test_%s_forward_references_fks", forward_ref_fk_test +) +register_tests( + NaturalKeySerializerTests, + "test_%s_forward_references_fk_errors", + forward_ref_fk_with_error_test, +) +register_tests( + NaturalKeySerializerTests, "test_%s_forward_references_m2ms", forward_ref_m2m_test +) +register_tests( + NaturalKeySerializerTests, + "test_%s_forward_references_m2m_errors", + forward_ref_m2m_with_error_test, +) +register_tests(NaturalKeySerializerTests, "test_%s_pk_with_default", pk_with_default) +register_tests( + NaturalKeySerializerTests, + 
"test_%s_fk_as_pk_natural_key_not_called", + fk_as_pk_natural_key_not_called, +) diff --git a/testbed/django__django/tests/serializers/test_xml.py b/testbed/django__django/tests/serializers/test_xml.py new file mode 100644 index 0000000000000000000000000000000000000000..c9df2f2a5bc5a94cdecd5f1c3c4d1586a8d18357 --- /dev/null +++ b/testbed/django__django/tests/serializers/test_xml.py @@ -0,0 +1,114 @@ +from xml.dom import minidom + +from django.core import serializers +from django.core.serializers.xml_serializer import DTDForbidden +from django.test import TestCase, TransactionTestCase + +from .tests import SerializersTestBase, SerializersTransactionTestBase + + +class XmlSerializerTestCase(SerializersTestBase, TestCase): + serializer_name = "xml" + pkless_str = """ + + + Reference + + + Non-fiction + +""" + mapping_ordering_str = """ + + + %(author_pk)s + Poker has no place on ESPN + 2006-06-16T11:00:00 + + + + +""" # NOQA + + @staticmethod + def _validate_output(serial_str): + try: + minidom.parseString(serial_str) + except Exception: + return False + else: + return True + + @staticmethod + def _get_pk_values(serial_str): + ret_list = [] + dom = minidom.parseString(serial_str) + fields = dom.getElementsByTagName("object") + for field in fields: + ret_list.append(field.getAttribute("pk")) + return ret_list + + @staticmethod + def _get_field_values(serial_str, field_name): + ret_list = [] + dom = minidom.parseString(serial_str) + fields = dom.getElementsByTagName("field") + for field in fields: + if field.getAttribute("name") == field_name: + temp = [] + for child in field.childNodes: + temp.append(child.nodeValue) + ret_list.append("".join(temp)) + return ret_list + + def test_control_char_failure(self): + """ + Serializing control characters with XML should fail as those characters + are not supported in the XML 1.0 standard (except HT, LF, CR). 
+ """ + self.a1.headline = "This contains \u0001 control \u0011 chars" + msg = "Article.headline (pk:%s) contains unserializable characters" % self.a1.pk + with self.assertRaisesMessage(ValueError, msg): + serializers.serialize(self.serializer_name, [self.a1]) + self.a1.headline = "HT \u0009, LF \u000A, and CR \u000D are allowed" + self.assertIn( + "HT \t, LF \n, and CR \r are allowed", + serializers.serialize(self.serializer_name, [self.a1]), + ) + + def test_no_dtd(self): + """ + The XML deserializer shouldn't allow a DTD. + + This is the most straightforward way to prevent all entity definitions + and avoid both external entities and entity-expansion attacks. + """ + xml = ( + '' + '' + ) + with self.assertRaises(DTDForbidden): + next(serializers.deserialize("xml", xml)) + + +class XmlSerializerTransactionTestCase( + SerializersTransactionTestBase, TransactionTestCase +): + serializer_name = "xml" + fwd_ref_str = """ + + + 1 + Forward references pose no problem + 2006-06-16T15:00:00 + + + + + + + Agnes + + + Reference +""" # NOQA diff --git a/testbed/django__django/tests/serializers/test_yaml.py b/testbed/django__django/tests/serializers/test_yaml.py new file mode 100644 index 0000000000000000000000000000000000000000..6db6f046fd49504cc53d63f3008ca589d679d98b --- /dev/null +++ b/testbed/django__django/tests/serializers/test_yaml.py @@ -0,0 +1,178 @@ +import importlib +import unittest +from io import StringIO + +from django.core import management, serializers +from django.core.serializers.base import DeserializationError +from django.test import SimpleTestCase, TestCase, TransactionTestCase + +from .models import Author +from .tests import SerializersTestBase, SerializersTransactionTestBase + +try: + import yaml + + HAS_YAML = True +except ImportError: + HAS_YAML = False + +YAML_IMPORT_ERROR_MESSAGE = r"No module named yaml" + + +class YamlImportModuleMock: + """Provides a wrapped import_module function to simulate yaml ImportError + + In order to run tests that 
verify the behavior of the YAML serializer + when run on a system that has yaml installed (like the django CI server), + mock import_module, so that it raises an ImportError when the yaml + serializer is being imported. The importlib.import_module() call is + being made in the serializers.register_serializer(). + + Refs: #12756 + """ + + def __init__(self): + self._import_module = importlib.import_module + + def import_module(self, module_path): + if module_path == serializers.BUILTIN_SERIALIZERS["yaml"]: + raise ImportError(YAML_IMPORT_ERROR_MESSAGE) + + return self._import_module(module_path) + + +class NoYamlSerializerTestCase(SimpleTestCase): + """Not having pyyaml installed provides a misleading error + + Refs: #12756 + """ + + @classmethod + def setUpClass(cls): + """Removes imported yaml and stubs importlib.import_module""" + super().setUpClass() + + cls._import_module_mock = YamlImportModuleMock() + importlib.import_module = cls._import_module_mock.import_module + + # clear out cached serializers to emulate yaml missing + serializers._serializers = {} + + @classmethod + def tearDownClass(cls): + """Puts yaml back if necessary""" + super().tearDownClass() + + importlib.import_module = cls._import_module_mock._import_module + + # clear out cached serializers to clean out BadSerializer instances + serializers._serializers = {} + + def test_serializer_pyyaml_error_message(self): + """Using yaml serializer without pyyaml raises ImportError""" + jane = Author(name="Jane") + with self.assertRaises(ImportError): + serializers.serialize("yaml", [jane]) + + def test_deserializer_pyyaml_error_message(self): + """Using yaml deserializer without pyyaml raises ImportError""" + with self.assertRaises(ImportError): + serializers.deserialize("yaml", "") + + def test_dumpdata_pyyaml_error_message(self): + """Calling dumpdata produces an error when yaml package missing""" + with self.assertRaisesMessage( + management.CommandError, YAML_IMPORT_ERROR_MESSAGE + ): + 
management.call_command("dumpdata", format="yaml") + + +@unittest.skipUnless(HAS_YAML, "No yaml library detected") +class YamlSerializerTestCase(SerializersTestBase, TestCase): + serializer_name = "yaml" + pkless_str = """- model: serializers.category + pk: null + fields: + name: Reference +- model: serializers.category + fields: + name: Non-fiction""" + + mapping_ordering_str = ( + """- model: serializers.article + pk: %(article_pk)s + fields: + author: %(author_pk)s + headline: Poker has no place on ESPN + pub_date: 2006-06-16 11:00:00 + categories:""" + + ( + " [%(first_category_pk)s, %(second_category_pk)s]" + if HAS_YAML and yaml.__version__ < "5.1" + else "\n - %(first_category_pk)s\n - %(second_category_pk)s" + ) + + """ + meta_data: [] + topics: [] +""" + ) + + @staticmethod + def _validate_output(serial_str): + try: + yaml.safe_load(StringIO(serial_str)) + except Exception: + return False + else: + return True + + @staticmethod + def _get_pk_values(serial_str): + ret_list = [] + stream = StringIO(serial_str) + for obj_dict in yaml.safe_load(stream): + ret_list.append(obj_dict["pk"]) + return ret_list + + @staticmethod + def _get_field_values(serial_str, field_name): + ret_list = [] + stream = StringIO(serial_str) + for obj_dict in yaml.safe_load(stream): + if "fields" in obj_dict and field_name in obj_dict["fields"]: + field_value = obj_dict["fields"][field_name] + # yaml.safe_load will return non-string objects for some + # of the fields we are interested in, this ensures that + # everything comes back as a string + if isinstance(field_value, str): + ret_list.append(field_value) + else: + ret_list.append(str(field_value)) + return ret_list + + def test_yaml_deserializer_exception(self): + with self.assertRaises(DeserializationError): + for obj in serializers.deserialize("yaml", "{"): + pass + + +@unittest.skipUnless(HAS_YAML, "No yaml library detected") +class YamlSerializerTransactionTestCase( + SerializersTransactionTestBase, TransactionTestCase +): + 
serializer_name = "yaml" + fwd_ref_str = """- model: serializers.article + pk: 1 + fields: + headline: Forward references pose no problem + pub_date: 2006-06-16 15:00:00 + categories: [1] + author: 1 +- model: serializers.category + pk: 1 + fields: + name: Reference +- model: serializers.author + pk: 1 + fields: + name: Agnes""" diff --git a/testbed/django__django/tests/serializers/tests.py b/testbed/django__django/tests/serializers/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..58bd74d3269b7a1f114267acd83b01e9c42918d8 --- /dev/null +++ b/testbed/django__django/tests/serializers/tests.py @@ -0,0 +1,505 @@ +from datetime import datetime +from functools import partialmethod +from io import StringIO +from unittest import mock, skipIf + +from django.core import serializers +from django.core.serializers import SerializerDoesNotExist +from django.core.serializers.base import ProgressBar +from django.db import connection, transaction +from django.http import HttpResponse +from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature +from django.test.utils import Approximate + +from .models import ( + Actor, + Article, + Author, + AuthorProfile, + BaseModel, + Category, + Child, + ComplexModel, + Movie, + Player, + ProxyBaseModel, + ProxyProxyBaseModel, + Score, + Team, +) + + +@override_settings( + SERIALIZATION_MODULES={ + "json2": "django.core.serializers.json", + } +) +class SerializerRegistrationTests(SimpleTestCase): + def setUp(self): + self.old_serializers = serializers._serializers + serializers._serializers = {} + + def tearDown(self): + serializers._serializers = self.old_serializers + + def test_register(self): + "Registering a new serializer populates the full registry. 
Refs #14823" + serializers.register_serializer("json3", "django.core.serializers.json") + + public_formats = serializers.get_public_serializer_formats() + self.assertIn("json3", public_formats) + self.assertIn("json2", public_formats) + self.assertIn("xml", public_formats) + + def test_unregister(self): + """ + Unregistering a serializer doesn't cause the registry to be + repopulated. + """ + serializers.unregister_serializer("xml") + serializers.register_serializer("json3", "django.core.serializers.json") + + public_formats = serializers.get_public_serializer_formats() + + self.assertNotIn("xml", public_formats) + self.assertIn("json3", public_formats) + + def test_unregister_unknown_serializer(self): + with self.assertRaises(SerializerDoesNotExist): + serializers.unregister_serializer("nonsense") + + def test_builtin_serializers(self): + "Requesting a list of serializer formats populates the registry" + all_formats = set(serializers.get_serializer_formats()) + public_formats = set(serializers.get_public_serializer_formats()) + + self.assertIn("xml", all_formats), + self.assertIn("xml", public_formats) + + self.assertIn("json2", all_formats) + self.assertIn("json2", public_formats) + + self.assertIn("python", all_formats) + self.assertNotIn("python", public_formats) + + def test_get_unknown_serializer(self): + """ + #15889: get_serializer('nonsense') raises a SerializerDoesNotExist + """ + with self.assertRaises(SerializerDoesNotExist): + serializers.get_serializer("nonsense") + + with self.assertRaises(KeyError): + serializers.get_serializer("nonsense") + + # SerializerDoesNotExist is instantiated with the nonexistent format + with self.assertRaisesMessage(SerializerDoesNotExist, "nonsense"): + serializers.get_serializer("nonsense") + + def test_get_unknown_deserializer(self): + with self.assertRaises(SerializerDoesNotExist): + serializers.get_deserializer("nonsense") + + +class SerializersTestBase: + serializer_name = None # Set by subclasses to the 
serialization format name + + @classmethod + def setUpTestData(cls): + sports = Category.objects.create(name="Sports") + music = Category.objects.create(name="Music") + op_ed = Category.objects.create(name="Op-Ed") + + cls.joe = Author.objects.create(name="Joe") + cls.jane = Author.objects.create(name="Jane") + + cls.a1 = Article( + author=cls.jane, + headline="Poker has no place on ESPN", + pub_date=datetime(2006, 6, 16, 11, 00), + ) + cls.a1.save() + cls.a1.categories.set([sports, op_ed]) + + cls.a2 = Article( + author=cls.joe, + headline="Time to reform copyright", + pub_date=datetime(2006, 6, 16, 13, 00, 11, 345), + ) + cls.a2.save() + cls.a2.categories.set([music, op_ed]) + + def test_serialize(self): + """Basic serialization works.""" + serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) + self.assertTrue(self._validate_output(serial_str)) + + def test_serializer_roundtrip(self): + """Serialized content can be deserialized.""" + serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) + models = list(serializers.deserialize(self.serializer_name, serial_str)) + self.assertEqual(len(models), 2) + + def test_serialize_to_stream(self): + obj = ComplexModel(field1="first", field2="second", field3="third") + obj.save_base(raw=True) + + # Serialize the test database to a stream + for stream in (StringIO(), HttpResponse()): + serializers.serialize(self.serializer_name, [obj], indent=2, stream=stream) + + # Serialize normally for a comparison + string_data = serializers.serialize(self.serializer_name, [obj], indent=2) + + # The two are the same + if isinstance(stream, StringIO): + self.assertEqual(string_data, stream.getvalue()) + else: + self.assertEqual(string_data, stream.content.decode()) + + def test_serialize_specific_fields(self): + obj = ComplexModel(field1="first", field2="second", field3="third") + obj.save_base(raw=True) + + # Serialize then deserialize the test database + serialized_data = 
serializers.serialize( + self.serializer_name, [obj], indent=2, fields=("field1", "field3") + ) + result = next(serializers.deserialize(self.serializer_name, serialized_data)) + + # The deserialized object contains data in only the serialized fields. + self.assertEqual(result.object.field1, "first") + self.assertEqual(result.object.field2, "") + self.assertEqual(result.object.field3, "third") + + def test_altering_serialized_output(self): + """ + The ability to create new objects by modifying serialized content. + """ + old_headline = "Poker has no place on ESPN" + new_headline = "Poker has no place on television" + serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) + serial_str = serial_str.replace(old_headline, new_headline) + models = list(serializers.deserialize(self.serializer_name, serial_str)) + + # Prior to saving, old headline is in place + self.assertTrue(Article.objects.filter(headline=old_headline)) + self.assertFalse(Article.objects.filter(headline=new_headline)) + + for model in models: + model.save() + + # After saving, new headline is in place + self.assertTrue(Article.objects.filter(headline=new_headline)) + self.assertFalse(Article.objects.filter(headline=old_headline)) + + def test_one_to_one_as_pk(self): + """ + If you use your own primary key field (such as a OneToOneField), it + doesn't appear in the serialized field list - it replaces the pk + identifier. 
+ """ + AuthorProfile.objects.create( + author=self.joe, date_of_birth=datetime(1970, 1, 1) + ) + serial_str = serializers.serialize( + self.serializer_name, AuthorProfile.objects.all() + ) + self.assertFalse(self._get_field_values(serial_str, "author")) + + for obj in serializers.deserialize(self.serializer_name, serial_str): + self.assertEqual(obj.object.pk, self.joe.pk) + + def test_serialize_field_subset(self): + """Output can be restricted to a subset of fields""" + valid_fields = ("headline", "pub_date") + invalid_fields = ("author", "categories") + serial_str = serializers.serialize( + self.serializer_name, Article.objects.all(), fields=valid_fields + ) + for field_name in invalid_fields: + self.assertFalse(self._get_field_values(serial_str, field_name)) + + for field_name in valid_fields: + self.assertTrue(self._get_field_values(serial_str, field_name)) + + def test_serialize_unicode_roundtrip(self): + """Unicode makes the roundtrip intact""" + actor_name = "Za\u017c\u00f3\u0142\u0107" + movie_title = "G\u0119\u015bl\u0105 ja\u017a\u0144" + ac = Actor(name=actor_name) + mv = Movie(title=movie_title, actor=ac) + ac.save() + mv.save() + + serial_str = serializers.serialize(self.serializer_name, [mv]) + self.assertEqual(self._get_field_values(serial_str, "title")[0], movie_title) + self.assertEqual(self._get_field_values(serial_str, "actor")[0], actor_name) + + obj_list = list(serializers.deserialize(self.serializer_name, serial_str)) + mv_obj = obj_list[0].object + self.assertEqual(mv_obj.title, movie_title) + + def test_unicode_serialization(self): + unicode_name = "יוניקוד" + data = serializers.serialize(self.serializer_name, [Author(name=unicode_name)]) + self.assertIn(unicode_name, data) + objs = list(serializers.deserialize(self.serializer_name, data)) + self.assertEqual(objs[0].object.name, unicode_name) + + def test_serialize_progressbar(self): + fake_stdout = StringIO() + serializers.serialize( + self.serializer_name, + Article.objects.all(), + 
progress_output=fake_stdout, + object_count=Article.objects.count(), + ) + self.assertTrue( + fake_stdout.getvalue().endswith( + "[" + "." * ProgressBar.progress_width + "]\n" + ) + ) + + def test_serialize_superfluous_queries(self): + """Ensure no superfluous queries are made when serializing ForeignKeys + + #17602 + """ + ac = Actor(name="Actor name") + ac.save() + mv = Movie(title="Movie title", actor_id=ac.pk) + mv.save() + + with self.assertNumQueries(0): + serializers.serialize(self.serializer_name, [mv]) + + def test_serialize_prefetch_related_m2m(self): + # One query for the Article table and one for each prefetched m2m + # field. + with self.assertNumQueries(4): + serializers.serialize( + self.serializer_name, + Article.objects.prefetch_related("categories", "meta_data", "topics"), + ) + # One query for the Article table, and three m2m queries for each + # article. + with self.assertNumQueries(7): + serializers.serialize(self.serializer_name, Article.objects.all()) + + def test_serialize_with_null_pk(self): + """ + Serialized data with no primary key results + in a model instance with no id + """ + category = Category(name="Reference") + serial_str = serializers.serialize(self.serializer_name, [category]) + pk_value = self._get_pk_values(serial_str)[0] + self.assertFalse(pk_value) + + cat_obj = list(serializers.deserialize(self.serializer_name, serial_str))[ + 0 + ].object + self.assertIsNone(cat_obj.id) + + def test_float_serialization(self): + """Float values serialize and deserialize intact""" + sc = Score(score=3.4) + sc.save() + serial_str = serializers.serialize(self.serializer_name, [sc]) + deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) + self.assertEqual(deserial_objs[0].object.score, Approximate(3.4, places=1)) + + def test_deferred_field_serialization(self): + author = Author.objects.create(name="Victor Hugo") + author = Author.objects.defer("name").get(pk=author.pk) + serial_str = 
serializers.serialize(self.serializer_name, [author]) + deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) + self.assertIsInstance(deserial_objs[0].object, Author) + + def test_custom_field_serialization(self): + """Custom fields serialize and deserialize intact""" + team_str = "Spartak Moskva" + player = Player() + player.name = "Soslan Djanaev" + player.rank = 1 + player.team = Team(team_str) + player.save() + serial_str = serializers.serialize(self.serializer_name, Player.objects.all()) + team = self._get_field_values(serial_str, "team") + self.assertTrue(team) + self.assertEqual(team[0], team_str) + + deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) + self.assertEqual( + deserial_objs[0].object.team.to_string(), player.team.to_string() + ) + + def test_pre_1000ad_date(self): + """Year values before 1000AD are properly formatted""" + # Regression for #12524 -- dates before 1000AD get prefixed + # 0's on the year + a = Article.objects.create( + author=self.jane, + headline="Nobody remembers the early years", + pub_date=datetime(1, 2, 3, 4, 5, 6), + ) + + serial_str = serializers.serialize(self.serializer_name, [a]) + date_values = self._get_field_values(serial_str, "pub_date") + self.assertEqual(date_values[0].replace("T", " "), "0001-02-03 04:05:06") + + def test_pkless_serialized_strings(self): + """ + Serialized strings without PKs can be turned into models + """ + deserial_objs = list( + serializers.deserialize(self.serializer_name, self.pkless_str) + ) + for obj in deserial_objs: + self.assertFalse(obj.object.id) + obj.save() + self.assertEqual(Category.objects.count(), 5) + + def test_deterministic_mapping_ordering(self): + """Mapping such as fields should be deterministically ordered. 
(#24558)""" + output = serializers.serialize(self.serializer_name, [self.a1], indent=2) + categories = self.a1.categories.values_list("pk", flat=True) + self.assertEqual( + output, + self.mapping_ordering_str + % { + "article_pk": self.a1.pk, + "author_pk": self.a1.author_id, + "first_category_pk": categories[0], + "second_category_pk": categories[1], + }, + ) + + def test_deserialize_force_insert(self): + """Deserialized content can be saved with force_insert as a parameter.""" + serial_str = serializers.serialize(self.serializer_name, [self.a1]) + deserial_obj = list(serializers.deserialize(self.serializer_name, serial_str))[ + 0 + ] + with mock.patch("django.db.models.Model") as mock_model: + deserial_obj.save(force_insert=False) + mock_model.save_base.assert_called_with( + deserial_obj.object, raw=True, using=None, force_insert=False + ) + + @skipUnlessDBFeature("can_defer_constraint_checks") + def test_serialize_proxy_model(self): + BaseModel.objects.create(parent_data=1) + base_objects = BaseModel.objects.all() + proxy_objects = ProxyBaseModel.objects.all() + proxy_proxy_objects = ProxyProxyBaseModel.objects.all() + base_data = serializers.serialize("json", base_objects) + proxy_data = serializers.serialize("json", proxy_objects) + proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects) + self.assertEqual(base_data, proxy_data.replace("proxy", "")) + self.assertEqual(base_data, proxy_proxy_data.replace("proxy", "")) + + def test_serialize_inherited_fields(self): + child_1 = Child.objects.create(parent_data="a", child_data="b") + child_2 = Child.objects.create(parent_data="c", child_data="d") + child_1.parent_m2m.add(child_2) + child_data = serializers.serialize(self.serializer_name, [child_1, child_2]) + self.assertEqual(self._get_field_values(child_data, "parent_m2m"), []) + self.assertEqual(self._get_field_values(child_data, "parent_data"), []) + + def test_serialize_only_pk(self): + with self.assertNumQueries(7) as ctx: + 
serializers.serialize( + self.serializer_name, + Article.objects.all(), + use_natural_foreign_keys=False, + ) + + categories_sql = ctx[1]["sql"] + self.assertNotIn(connection.ops.quote_name("meta_data_id"), categories_sql) + meta_data_sql = ctx[2]["sql"] + self.assertNotIn(connection.ops.quote_name("kind"), meta_data_sql) + topics_data_sql = ctx[3]["sql"] + self.assertNotIn(connection.ops.quote_name("category_id"), topics_data_sql) + + def test_serialize_no_only_pk_with_natural_keys(self): + with self.assertNumQueries(7) as ctx: + serializers.serialize( + self.serializer_name, + Article.objects.all(), + use_natural_foreign_keys=True, + ) + + categories_sql = ctx[1]["sql"] + self.assertNotIn(connection.ops.quote_name("meta_data_id"), categories_sql) + # CategoryMetaData has natural_key(). + meta_data_sql = ctx[2]["sql"] + self.assertIn(connection.ops.quote_name("kind"), meta_data_sql) + topics_data_sql = ctx[3]["sql"] + self.assertNotIn(connection.ops.quote_name("category_id"), topics_data_sql) + + +class SerializerAPITests(SimpleTestCase): + def test_stream_class(self): + class File: + def __init__(self): + self.lines = [] + + def write(self, line): + self.lines.append(line) + + def getvalue(self): + return "".join(self.lines) + + class Serializer(serializers.json.Serializer): + stream_class = File + + serializer = Serializer() + data = serializer.serialize([Score(id=1, score=3.4)]) + self.assertIs(serializer.stream_class, File) + self.assertIsInstance(serializer.stream, File) + self.assertEqual( + data, '[{"model": "serializers.score", "pk": 1, "fields": {"score": 3.4}}]' + ) + + +class SerializersTransactionTestBase: + available_apps = ["serializers"] + + @skipUnlessDBFeature("supports_forward_references") + def test_forward_refs(self): + """ + Objects ids can be referenced before they are + defined in the serialization data. + """ + # The deserialization process needs to run in a transaction in order + # to test forward reference handling. 
+ with transaction.atomic(): + objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str) + with connection.constraint_checks_disabled(): + for obj in objs: + obj.save() + + for model_cls in (Category, Author, Article): + self.assertEqual(model_cls.objects.count(), 1) + art_obj = Article.objects.all()[0] + self.assertEqual(art_obj.categories.count(), 1) + self.assertEqual(art_obj.author.name, "Agnes") + + +def register_tests(test_class, method_name, test_func, exclude=()): + """ + Dynamically create serializer tests to ensure that all registered + serializers are automatically tested. + """ + for format_ in serializers.get_serializer_formats(): + if format_ == "geojson" or format_ in exclude: + continue + decorated_func = skipIf( + isinstance(serializers.get_serializer(format_), serializers.BadSerializer), + "The Python library for the %s serializer is not installed." % format_, + )(test_func) + setattr( + test_class, method_name % format_, partialmethod(decorated_func, format_) + ) diff --git a/testbed/django__django/tests/servers/__init__.py b/testbed/django__django/tests/servers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/servers/another_app/__init__.py b/testbed/django__django/tests/servers/another_app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/servers/another_app/static/another_app/another_app_static_file.txt b/testbed/django__django/tests/servers/another_app/static/another_app/another_app_static_file.txt new file mode 100644 index 0000000000000000000000000000000000000000..a2784fa8e29b91c329bcb629f54705030bd3d033 --- /dev/null +++ b/testbed/django__django/tests/servers/another_app/static/another_app/another_app_static_file.txt @@ -0,0 +1 @@ +static file from another_app diff --git 
a/testbed/django__django/tests/servers/fixtures/testdata.json b/testbed/django__django/tests/servers/fixtures/testdata.json
new file mode 100644
index 0000000000000000000000000000000000000000..d81b2253d21e0e8d43dc02b578f96c8f3489d2f1
--- /dev/null
+++ b/testbed/django__django/tests/servers/fixtures/testdata.json
@@ -0,0 +1,16 @@
[
  {
    "pk": 1,
    "model": "servers.person",
    "fields": {
      "name": "jane"
    }
  },
  {
    "pk": 2,
    "model": "servers.person",
    "fields": {
      "name": "robert"
    }
  }
]
\ No newline at end of file
diff --git a/testbed/django__django/tests/servers/media/example_media_file.txt b/testbed/django__django/tests/servers/media/example_media_file.txt
new file mode 100644
index 0000000000000000000000000000000000000000..dd2dda94e301dd9725aac38519bcbb4039518d63
--- /dev/null
+++ b/testbed/django__django/tests/servers/media/example_media_file.txt
@@ -0,0 +1 @@
+example media file
diff --git a/testbed/django__django/tests/servers/models.py b/testbed/django__django/tests/servers/models.py
new file mode 100644
index 0000000000000000000000000000000000000000..b523bae6f2df4f92ab109dccb3ef11fca5e012ae
--- /dev/null
+++ b/testbed/django__django/tests/servers/models.py
@@ -0,0 +1,5 @@
+from django.db import models
+
+
+class Person(models.Model):
+    name = models.CharField(max_length=255)
diff --git a/testbed/django__django/tests/servers/static/example_static_file.txt b/testbed/django__django/tests/servers/static/example_static_file.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b5d8342212b3a00c441d5bb889102061d214295c
--- /dev/null
+++ b/testbed/django__django/tests/servers/static/example_static_file.txt
@@ -0,0 +1 @@
+example static file
diff --git a/testbed/django__django/tests/servers/test_basehttp.py b/testbed/django__django/tests/servers/test_basehttp.py
new file mode 100644
index 0000000000000000000000000000000000000000..1e535e933e24729e8cf418d76376a5b082e49e01
--- /dev/null
+++
b/testbed/django__django/tests/servers/test_basehttp.py @@ -0,0 +1,191 @@ +from io import BytesIO +from socketserver import ThreadingMixIn + +from django.core.handlers.wsgi import WSGIRequest +from django.core.servers.basehttp import WSGIRequestHandler, WSGIServer +from django.test import SimpleTestCase +from django.test.client import RequestFactory +from django.test.utils import captured_stderr + + +class Stub(ThreadingMixIn): + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + def sendall(self, data): + self.makefile("wb").write(data) + + +class UnclosableBytesIO(BytesIO): + def close(self): + # WSGIRequestHandler closes the output file; we need to make this a + # no-op so we can still read its contents. + pass + + +class WSGIRequestHandlerTestCase(SimpleTestCase): + request_factory = RequestFactory() + + def test_log_message(self): + request = WSGIRequest(self.request_factory.get("/").environ) + request.makefile = lambda *args, **kwargs: BytesIO() + handler = WSGIRequestHandler(request, "192.168.0.2", None) + level_status_codes = { + "info": [200, 301, 304], + "warning": [400, 403, 404], + "error": [500, 503], + } + for level, status_codes in level_status_codes.items(): + for status_code in status_codes: + # The correct level gets the message. + with self.assertLogs("django.server", level.upper()) as cm: + handler.log_message("GET %s %s", "A", str(status_code)) + self.assertIn("GET A %d" % status_code, cm.output[0]) + # Incorrect levels don't have any messages. 
+ for wrong_level in level_status_codes: + if wrong_level != level: + with self.assertLogs("django.server", "INFO") as cm: + handler.log_message("GET %s %s", "A", str(status_code)) + self.assertNotEqual( + cm.records[0].levelname, wrong_level.upper() + ) + + def test_https(self): + request = WSGIRequest(self.request_factory.get("/").environ) + request.makefile = lambda *args, **kwargs: BytesIO() + + handler = WSGIRequestHandler(request, "192.168.0.2", None) + + with self.assertLogs("django.server", "ERROR") as cm: + handler.log_message("GET %s %s", "\x16\x03", "4") + self.assertEqual( + "You're accessing the development server over HTTPS, " + "but it only supports HTTP.", + cm.records[0].getMessage(), + ) + + def test_strips_underscore_headers(self): + """WSGIRequestHandler ignores headers containing underscores. + + This follows the lead of nginx and Apache 2.4, and is to avoid + ambiguity between dashes and underscores in mapping to WSGI environ, + which can have security implications. + """ + + def test_app(environ, start_response): + """A WSGI app that just reflects its HTTP environ.""" + start_response("200 OK", []) + http_environ_items = sorted( + "%s:%s" % (k, v) for k, v in environ.items() if k.startswith("HTTP_") + ) + yield (",".join(http_environ_items)).encode() + + rfile = BytesIO() + rfile.write(b"GET / HTTP/1.0\r\n") + rfile.write(b"Some-Header: good\r\n") + rfile.write(b"Some_Header: bad\r\n") + rfile.write(b"Other_Header: bad\r\n") + rfile.seek(0) + + wfile = UnclosableBytesIO() + + def makefile(mode, *a, **kw): + if mode == "rb": + return rfile + elif mode == "wb": + return wfile + + request = Stub(makefile=makefile) + server = Stub(base_environ={}, get_app=lambda: test_app) + + # Prevent logging from appearing in test output. 
+ with self.assertLogs("django.server", "INFO"): + # instantiating a handler runs the request as side effect + WSGIRequestHandler(request, "192.168.0.2", server) + + wfile.seek(0) + body = list(wfile.readlines())[-1] + + self.assertEqual(body, b"HTTP_SOME_HEADER:good") + + def test_no_body_returned_for_head_requests(self): + hello_world_body = b"Hello World" + content_length = len(hello_world_body) + + def test_app(environ, start_response): + """A WSGI app that returns a hello world.""" + start_response("200 OK", []) + return [hello_world_body] + + rfile = BytesIO(b"GET / HTTP/1.0\r\n") + rfile.seek(0) + + wfile = UnclosableBytesIO() + + def makefile(mode, *a, **kw): + if mode == "rb": + return rfile + elif mode == "wb": + return wfile + + request = Stub(makefile=makefile) + server = Stub(base_environ={}, get_app=lambda: test_app) + + # Prevent logging from appearing in test output. + with self.assertLogs("django.server", "INFO"): + # Instantiating a handler runs the request as side effect. + WSGIRequestHandler(request, "192.168.0.2", server) + + wfile.seek(0) + lines = list(wfile.readlines()) + body = lines[-1] + # The body is returned in a GET response. + self.assertEqual(body, hello_world_body) + self.assertIn(f"Content-Length: {content_length}\r\n".encode(), lines) + self.assertNotIn(b"Connection: close\r\n", lines) + + rfile = BytesIO(b"HEAD / HTTP/1.0\r\n") + rfile.seek(0) + wfile = UnclosableBytesIO() + + with self.assertLogs("django.server", "INFO"): + WSGIRequestHandler(request, "192.168.0.2", server) + + wfile.seek(0) + lines = list(wfile.readlines()) + body = lines[-1] + # The body is not returned in a HEAD response. 
+ self.assertEqual(body, b"\r\n") + self.assertIs( + any([line.startswith(b"Content-Length:") for line in lines]), False + ) + self.assertNotIn(b"Connection: close\r\n", lines) + + +class WSGIServerTestCase(SimpleTestCase): + request_factory = RequestFactory() + + def test_broken_pipe_errors(self): + """WSGIServer handles broken pipe errors.""" + request = WSGIRequest(self.request_factory.get("/").environ) + client_address = ("192.168.2.0", 8080) + msg = f"- Broken pipe from {client_address}" + tests = [ + BrokenPipeError, + ConnectionAbortedError, + ConnectionResetError, + ] + for exception in tests: + with self.subTest(exception=exception): + try: + server = WSGIServer(("localhost", 0), WSGIRequestHandler) + try: + raise exception() + except Exception: + with captured_stderr() as err: + with self.assertLogs("django.server", "INFO") as cm: + server.handle_error(request, client_address) + self.assertEqual(err.getvalue(), "") + self.assertEqual(cm.records[0].getMessage(), msg) + finally: + server.server_close() diff --git a/testbed/django__django/tests/servers/test_liveserverthread.py b/testbed/django__django/tests/servers/test_liveserverthread.py new file mode 100644 index 0000000000000000000000000000000000000000..8ed70f3202cdee8dfb2b7c0ab02677c46c3dd76e --- /dev/null +++ b/testbed/django__django/tests/servers/test_liveserverthread.py @@ -0,0 +1,46 @@ +from django.db import DEFAULT_DB_ALIAS, connections +from django.test import LiveServerTestCase, TransactionTestCase +from django.test.testcases import LiveServerThread + + +# Use TransactionTestCase instead of TestCase to run outside of a transaction, +# otherwise closing the connection would implicitly rollback and not set the +# connection to None. 
+class LiveServerThreadTest(TransactionTestCase): + available_apps = [] + + def run_live_server_thread(self, connections_override=None): + thread = LiveServerTestCase._create_server_thread(connections_override) + thread.daemon = True + thread.start() + thread.is_ready.wait() + thread.terminate() + + def test_closes_connections(self): + conn = connections[DEFAULT_DB_ALIAS] + # Pass a connection to the thread to check they are being closed. + connections_override = {DEFAULT_DB_ALIAS: conn} + # Open a connection to the database. + conn.connect() + conn.inc_thread_sharing() + try: + self.assertIsNotNone(conn.connection) + self.run_live_server_thread(connections_override) + self.assertIsNone(conn.connection) + finally: + conn.dec_thread_sharing() + + def test_server_class(self): + class FakeServer: + def __init__(*args, **kwargs): + pass + + class MyServerThread(LiveServerThread): + server_class = FakeServer + + class MyServerTestCase(LiveServerTestCase): + server_thread_class = MyServerThread + + thread = MyServerTestCase._create_server_thread(None) + server = thread._create_server() + self.assertIs(type(server), FakeServer) diff --git a/testbed/django__django/tests/servers/tests.py b/testbed/django__django/tests/servers/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..4dece98ce98ceb0b546b3760c5e33d578ff0a0a6 --- /dev/null +++ b/testbed/django__django/tests/servers/tests.py @@ -0,0 +1,422 @@ +""" +Tests for django.core.servers. 
+""" +import errno +import os +import socket +import threading +import unittest +from http.client import HTTPConnection +from urllib.error import HTTPError +from urllib.parse import urlencode +from urllib.request import urlopen + +from django.conf import settings +from django.core.servers.basehttp import ThreadedWSGIServer, WSGIServer +from django.db import DEFAULT_DB_ALIAS, connection, connections +from django.test import LiveServerTestCase, override_settings +from django.test.testcases import LiveServerThread, QuietWSGIRequestHandler + +from .models import Person + +TEST_ROOT = os.path.dirname(__file__) +TEST_SETTINGS = { + "MEDIA_URL": "media/", + "MEDIA_ROOT": os.path.join(TEST_ROOT, "media"), + "STATIC_URL": "static/", + "STATIC_ROOT": os.path.join(TEST_ROOT, "static"), +} + + +@override_settings(ROOT_URLCONF="servers.urls", **TEST_SETTINGS) +class LiveServerBase(LiveServerTestCase): + available_apps = [ + "servers", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + ] + fixtures = ["testdata.json"] + + def urlopen(self, url): + return urlopen(self.live_server_url + url) + + +class CloseConnectionTestServer(ThreadedWSGIServer): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # This event is set right after the first time a request closes its + # database connections. 
+ self._connections_closed = threading.Event() + + def _close_connections(self): + super()._close_connections() + self._connections_closed.set() + + +class CloseConnectionTestLiveServerThread(LiveServerThread): + server_class = CloseConnectionTestServer + + def _create_server(self, connections_override=None): + return super()._create_server(connections_override=self.connections_override) + + +class LiveServerTestCloseConnectionTest(LiveServerBase): + server_thread_class = CloseConnectionTestLiveServerThread + + @classmethod + def _make_connections_override(cls): + conn = connections[DEFAULT_DB_ALIAS] + cls.conn = conn + cls.old_conn_max_age = conn.settings_dict["CONN_MAX_AGE"] + # Set the connection's CONN_MAX_AGE to None to simulate the + # CONN_MAX_AGE setting being set to None on the server. This prevents + # Django from closing the connection and allows testing that + # ThreadedWSGIServer closes connections. + conn.settings_dict["CONN_MAX_AGE"] = None + # Pass a database connection through to the server to check it is being + # closed by ThreadedWSGIServer. + return {DEFAULT_DB_ALIAS: conn} + + @classmethod + def tearDownConnectionTest(cls): + cls.conn.settings_dict["CONN_MAX_AGE"] = cls.old_conn_max_age + + @classmethod + def tearDownClass(cls): + cls.tearDownConnectionTest() + super().tearDownClass() + + def test_closes_connections(self): + # The server's request thread sets this event after closing + # its database connections. + closed_event = self.server_thread.httpd._connections_closed + conn = self.conn + # Open a connection to the database. + conn.connect() + self.assertIsNotNone(conn.connection) + with self.urlopen("/model_view/") as f: + # The server can access the database. + self.assertCountEqual(f.read().splitlines(), [b"jane", b"robert"]) + # Wait for the server's request thread to close the connection. + # A timeout of 0.1 seconds should be more than enough. If the wait + # times out, the assertion after should fail. 
+ closed_event.wait(timeout=0.1) + self.assertIsNone(conn.connection) + + +@unittest.skipUnless(connection.vendor == "sqlite", "SQLite specific test.") +class LiveServerInMemoryDatabaseLockTest(LiveServerBase): + def test_in_memory_database_lock(self): + """ + With a threaded LiveServer and an in-memory database, an error can + occur when 2 requests reach the server and try to lock the database + at the same time, if the requests do not share the same database + connection. + """ + conn = self.server_thread.connections_override[DEFAULT_DB_ALIAS] + # Open a connection to the database. + conn.connect() + # Create a transaction to lock the database. + cursor = conn.cursor() + cursor.execute("BEGIN IMMEDIATE TRANSACTION") + try: + with self.urlopen("/create_model_instance/") as f: + self.assertEqual(f.status, 200) + except HTTPError: + self.fail("Unexpected error due to a database lock.") + finally: + # Release the transaction. + cursor.execute("ROLLBACK") + + +class FailingLiveServerThread(LiveServerThread): + def _create_server(self, connections_override=None): + raise RuntimeError("Error creating server.") + + +class LiveServerTestCaseSetupTest(LiveServerBase): + server_thread_class = FailingLiveServerThread + + @classmethod + def check_allowed_hosts(cls, expected): + if settings.ALLOWED_HOSTS != expected: + raise RuntimeError(f"{settings.ALLOWED_HOSTS} != {expected}") + + @classmethod + def setUpClass(cls): + cls.check_allowed_hosts(["testserver"]) + try: + super().setUpClass() + except RuntimeError: + # LiveServerTestCase's change to ALLOWED_HOSTS should be reverted. 
+ cls.doClassCleanups() + cls.check_allowed_hosts(["testserver"]) + else: + raise RuntimeError("Server did not fail.") + cls.set_up_called = True + + def test_set_up_class(self): + self.assertIs(self.set_up_called, True) + + +class LiveServerAddress(LiveServerBase): + @classmethod + def setUpClass(cls): + super().setUpClass() + # put it in a list to prevent descriptor lookups in test + cls.live_server_url_test = [cls.live_server_url] + + def test_live_server_url_is_class_property(self): + self.assertIsInstance(self.live_server_url_test[0], str) + self.assertEqual(self.live_server_url_test[0], self.live_server_url) + + +class LiveServerSingleThread(LiveServerThread): + def _create_server(self, connections_override=None): + return WSGIServer( + (self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False + ) + + +class SingleThreadLiveServerTestCase(LiveServerTestCase): + server_thread_class = LiveServerSingleThread + + +class LiveServerViews(LiveServerBase): + def test_protocol(self): + """Launched server serves with HTTP 1.1.""" + with self.urlopen("/example_view/") as f: + self.assertEqual(f.version, 11) + + def test_closes_connection_without_content_length(self): + """ + An HTTP 1.1 server is supposed to support keep-alive. Since our + development server is rather simple we support it only in cases where + we can detect a content length from the response. This should be doable + for all simple views and streaming responses where an iterable with + length of one is passed. The latter follows as result of `set_content_length` + from https://github.com/python/cpython/blob/main/Lib/wsgiref/handlers.py. + + If we cannot detect a content length we explicitly set the `Connection` + header to `close` to notify the client that we do not actually support + it. 
+ """ + conn = HTTPConnection( + LiveServerViews.server_thread.host, + LiveServerViews.server_thread.port, + timeout=1, + ) + try: + conn.request( + "GET", "/streaming_example_view/", headers={"Connection": "keep-alive"} + ) + response = conn.getresponse() + self.assertTrue(response.will_close) + self.assertEqual(response.read(), b"Iamastream") + self.assertEqual(response.status, 200) + self.assertEqual(response.getheader("Connection"), "close") + + conn.request( + "GET", "/streaming_example_view/", headers={"Connection": "close"} + ) + response = conn.getresponse() + self.assertTrue(response.will_close) + self.assertEqual(response.read(), b"Iamastream") + self.assertEqual(response.status, 200) + self.assertEqual(response.getheader("Connection"), "close") + finally: + conn.close() + + def test_keep_alive_on_connection_with_content_length(self): + """ + See `test_closes_connection_without_content_length` for details. This + is a follow up test, which ensure that we do not close the connection + if not needed, hence allowing us to take advantage of keep-alive. 
+ """ + conn = HTTPConnection( + LiveServerViews.server_thread.host, LiveServerViews.server_thread.port + ) + try: + conn.request("GET", "/example_view/", headers={"Connection": "keep-alive"}) + response = conn.getresponse() + self.assertFalse(response.will_close) + self.assertEqual(response.read(), b"example view") + self.assertEqual(response.status, 200) + self.assertIsNone(response.getheader("Connection")) + + conn.request("GET", "/example_view/", headers={"Connection": "close"}) + response = conn.getresponse() + self.assertFalse(response.will_close) + self.assertEqual(response.read(), b"example view") + self.assertEqual(response.status, 200) + self.assertIsNone(response.getheader("Connection")) + finally: + conn.close() + + def test_keep_alive_connection_clears_previous_request_data(self): + conn = HTTPConnection( + LiveServerViews.server_thread.host, LiveServerViews.server_thread.port + ) + try: + conn.request( + "POST", "/method_view/", b"{}", headers={"Connection": "keep-alive"} + ) + response = conn.getresponse() + self.assertFalse(response.will_close) + self.assertEqual(response.status, 200) + self.assertEqual(response.read(), b"POST") + + conn.request( + "POST", "/method_view/", b"{}", headers={"Connection": "close"} + ) + response = conn.getresponse() + self.assertFalse(response.will_close) + self.assertEqual(response.status, 200) + self.assertEqual(response.read(), b"POST") + finally: + conn.close() + + def test_404(self): + with self.assertRaises(HTTPError) as err: + self.urlopen("/") + err.exception.close() + self.assertEqual(err.exception.code, 404, "Expected 404 response") + + def test_view(self): + with self.urlopen("/example_view/") as f: + self.assertEqual(f.read(), b"example view") + + def test_static_files(self): + with self.urlopen("/static/example_static_file.txt") as f: + self.assertEqual(f.read().rstrip(b"\r\n"), b"example static file") + + def test_no_collectstatic_emulation(self): + """ + LiveServerTestCase reports a 404 status code when 
HTTP client + tries to access a static file that isn't explicitly put under + STATIC_ROOT. + """ + with self.assertRaises(HTTPError) as err: + self.urlopen("/static/another_app/another_app_static_file.txt") + err.exception.close() + self.assertEqual(err.exception.code, 404, "Expected 404 response") + + def test_media_files(self): + with self.urlopen("/media/example_media_file.txt") as f: + self.assertEqual(f.read().rstrip(b"\r\n"), b"example media file") + + def test_environ(self): + with self.urlopen("/environ_view/?%s" % urlencode({"q": "тест"})) as f: + self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read()) + + +@override_settings(ROOT_URLCONF="servers.urls") +class SingleThreadLiveServerViews(SingleThreadLiveServerTestCase): + available_apps = ["servers"] + + def test_closes_connection_with_content_length(self): + """ + Contrast to + LiveServerViews.test_keep_alive_on_connection_with_content_length(). + Persistent connections require threading server. + """ + conn = HTTPConnection( + SingleThreadLiveServerViews.server_thread.host, + SingleThreadLiveServerViews.server_thread.port, + timeout=1, + ) + try: + conn.request("GET", "/example_view/", headers={"Connection": "keep-alive"}) + response = conn.getresponse() + self.assertTrue(response.will_close) + self.assertEqual(response.read(), b"example view") + self.assertEqual(response.status, 200) + self.assertEqual(response.getheader("Connection"), "close") + finally: + conn.close() + + +class LiveServerDatabase(LiveServerBase): + def test_fixtures_loaded(self): + """ + Fixtures are properly loaded and visible to the live server thread. + """ + with self.urlopen("/model_view/") as f: + self.assertCountEqual(f.read().splitlines(), [b"jane", b"robert"]) + + def test_database_writes(self): + """ + Data written to the database by a view can be read. 
+ """ + with self.urlopen("/create_model_instance/"): + pass + self.assertQuerySetEqual( + Person.objects.order_by("pk"), + ["jane", "robert", "emily"], + lambda b: b.name, + ) + + +class LiveServerPort(LiveServerBase): + def test_port_bind(self): + """ + Each LiveServerTestCase binds to a unique port or fails to start a + server thread when run concurrently (#26011). + """ + TestCase = type("TestCase", (LiveServerBase,), {}) + try: + TestCase._start_server_thread() + except OSError as e: + if e.errno == errno.EADDRINUSE: + # We're out of ports, LiveServerTestCase correctly fails with + # an OSError. + return + # Unexpected error. + raise + try: + self.assertNotEqual( + self.live_server_url, + TestCase.live_server_url, + f"Acquired duplicate server addresses for server threads: " + f"{self.live_server_url}", + ) + finally: + TestCase.doClassCleanups() + + def test_specified_port_bind(self): + """LiveServerTestCase.port customizes the server's port.""" + TestCase = type("TestCase", (LiveServerBase,), {}) + # Find an open port and tell TestCase to use it. 
+ s = socket.socket() + s.bind(("", 0)) + TestCase.port = s.getsockname()[1] + s.close() + TestCase._start_server_thread() + try: + self.assertEqual( + TestCase.port, + TestCase.server_thread.port, + f"Did not use specified port for LiveServerTestCase thread: " + f"{TestCase.port}", + ) + finally: + TestCase.doClassCleanups() + + +class LiveServerThreadedTests(LiveServerBase): + """If LiveServerTestCase isn't threaded, these tests will hang.""" + + def test_view_calls_subview(self): + url = "/subview_calling_view/?%s" % urlencode({"url": self.live_server_url}) + with self.urlopen(url) as f: + self.assertEqual(f.read(), b"subview calling view: subview") + + def test_check_model_instance_from_subview(self): + url = "/check_model_instance_from_subview/?%s" % urlencode( + { + "url": self.live_server_url, + } + ) + with self.urlopen(url) as f: + self.assertIn(b"emily", f.read()) diff --git a/testbed/django__django/tests/servers/urls.py b/testbed/django__django/tests/servers/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..54c2dcb834f5c272b00004332d1afdbefc1d453f --- /dev/null +++ b/testbed/django__django/tests/servers/urls.py @@ -0,0 +1,15 @@ +from django.urls import path + +from . 
import views + +urlpatterns = [ + path("example_view/", views.example_view), + path("streaming_example_view/", views.streaming_example_view), + path("model_view/", views.model_view), + path("create_model_instance/", views.create_model_instance), + path("environ_view/", views.environ_view), + path("subview_calling_view/", views.subview_calling_view), + path("subview/", views.subview), + path("check_model_instance_from_subview/", views.check_model_instance_from_subview), + path("method_view/", views.method_view), +] diff --git a/testbed/django__django/tests/servers/views.py b/testbed/django__django/tests/servers/views.py new file mode 100644 index 0000000000000000000000000000000000000000..97a467f0b078acc2fb044393d29369d036f45fa0 --- /dev/null +++ b/testbed/django__django/tests/servers/views.py @@ -0,0 +1,52 @@ +from urllib.request import urlopen + +from django.http import HttpResponse, StreamingHttpResponse +from django.views.decorators.csrf import csrf_exempt + +from .models import Person + + +def example_view(request): + return HttpResponse("example view") + + +def streaming_example_view(request): + return StreamingHttpResponse((b"I", b"am", b"a", b"stream")) + + +def model_view(request): + people = Person.objects.all() + return HttpResponse("\n".join(person.name for person in people)) + + +def create_model_instance(request): + person = Person(name="emily") + person.save() + return HttpResponse() + + +def environ_view(request): + return HttpResponse( + "\n".join("%s: %r" % (k, v) for k, v in request.environ.items()) + ) + + +def subview(request): + return HttpResponse("subview") + + +def subview_calling_view(request): + with urlopen(request.GET["url"] + "/subview/") as response: + return HttpResponse("subview calling view: {}".format(response.read().decode())) + + +def check_model_instance_from_subview(request): + with urlopen(request.GET["url"] + "/create_model_instance/"): + pass + with urlopen(request.GET["url"] + "/model_view/") as response: + return 
HttpResponse("subview calling view: {}".format(response.read().decode())) + + +@csrf_exempt +def method_view(request): + return HttpResponse(request.method) diff --git a/testbed/django__django/tests/sessions_tests/__init__.py b/testbed/django__django/tests/sessions_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/sessions_tests/models.py b/testbed/django__django/tests/sessions_tests/models.py new file mode 100644 index 0000000000000000000000000000000000000000..6eda26f22aff44651c7277ee0665a66858b78945 --- /dev/null +++ b/testbed/django__django/tests/sessions_tests/models.py @@ -0,0 +1,45 @@ +""" +This custom Session model adds an extra column to store an account ID. In +real-world applications, it gives you the option of querying the database for +all active sessions for a particular account. +""" +from django.contrib.sessions.backends.db import SessionStore as DBStore +from django.contrib.sessions.base_session import AbstractBaseSession +from django.db import models + + +class CustomSession(AbstractBaseSession): + """ + A session model with a column for an account ID. + """ + + account_id = models.IntegerField(null=True, db_index=True) + + @classmethod + def get_session_store_class(cls): + return SessionStore + + +class SessionStore(DBStore): + """ + A database session store, that handles updating the account ID column + inside the custom session model. + """ + + @classmethod + def get_model_class(cls): + return CustomSession + + def create_model_instance(self, data): + obj = super().create_model_instance(data) + + try: + account_id = int(data.get("_auth_user_id")) + except (ValueError, TypeError): + account_id = None + obj.account_id = account_id + + return obj + + def get_session_cookie_age(self): + return 60 * 60 * 24 # One day. 
diff --git a/testbed/django__django/tests/sessions_tests/no_clear_expired.py b/testbed/django__django/tests/sessions_tests/no_clear_expired.py new file mode 100644 index 0000000000000000000000000000000000000000..4a229868ce85fe66b251fa265829dfc25116a9ed --- /dev/null +++ b/testbed/django__django/tests/sessions_tests/no_clear_expired.py @@ -0,0 +1,7 @@ +from django.contrib.sessions.backends.base import SessionBase + + +class SessionStore(SessionBase): + """Session store without support for clearing expired sessions.""" + + pass diff --git a/testbed/django__django/tests/sessions_tests/tests.py b/testbed/django__django/tests/sessions_tests/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..ea2b201c2061b87bd1b5585b1e966f3297b48169 --- /dev/null +++ b/testbed/django__django/tests/sessions_tests/tests.py @@ -0,0 +1,931 @@ +import base64 +import os +import shutil +import string +import tempfile +import unittest +from datetime import timedelta +from http import cookies +from pathlib import Path +from unittest import mock + +from django.conf import settings +from django.contrib.sessions.backends.base import UpdateError +from django.contrib.sessions.backends.cache import SessionStore as CacheSession +from django.contrib.sessions.backends.cached_db import SessionStore as CacheDBSession +from django.contrib.sessions.backends.db import SessionStore as DatabaseSession +from django.contrib.sessions.backends.file import SessionStore as FileSession +from django.contrib.sessions.backends.signed_cookies import ( + SessionStore as CookieSession, +) +from django.contrib.sessions.exceptions import InvalidSessionKey, SessionInterrupted +from django.contrib.sessions.middleware import SessionMiddleware +from django.contrib.sessions.models import Session +from django.contrib.sessions.serializers import JSONSerializer +from django.core import management +from django.core.cache import caches +from django.core.cache.backends.base import InvalidCacheBackendError +from 
django.core.exceptions import ImproperlyConfigured +from django.core.signing import TimestampSigner +from django.http import HttpResponse +from django.test import ( + RequestFactory, + SimpleTestCase, + TestCase, + ignore_warnings, + override_settings, +) +from django.utils import timezone + +from .models import SessionStore as CustomDatabaseSession + + +class SessionTestsMixin: + # This does not inherit from TestCase to avoid any tests being run with this + # class, which wouldn't work, and to allow different TestCase subclasses to + # be used. + + backend = None # subclasses must specify + + def setUp(self): + self.session = self.backend() + + def tearDown(self): + # NB: be careful to delete any sessions created; stale sessions fill up + # the /tmp (with some backends) and eventually overwhelm it after lots + # of runs (think buildbots) + self.session.delete() + + def test_new_session(self): + self.assertIs(self.session.modified, False) + self.assertIs(self.session.accessed, False) + + def test_get_empty(self): + self.assertIsNone(self.session.get("cat")) + + def test_store(self): + self.session["cat"] = "dog" + self.assertIs(self.session.modified, True) + self.assertEqual(self.session.pop("cat"), "dog") + + def test_pop(self): + self.session["some key"] = "exists" + # Need to reset these to pretend we haven't accessed it: + self.accessed = False + self.modified = False + + self.assertEqual(self.session.pop("some key"), "exists") + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, True) + self.assertIsNone(self.session.get("some key")) + + def test_pop_default(self): + self.assertEqual( + self.session.pop("some key", "does not exist"), "does not exist" + ) + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, False) + + def test_pop_default_named_argument(self): + self.assertEqual( + self.session.pop("some key", default="does not exist"), "does not exist" + ) + self.assertIs(self.session.accessed, 
True) + self.assertIs(self.session.modified, False) + + def test_pop_no_default_keyerror_raised(self): + with self.assertRaises(KeyError): + self.session.pop("some key") + + def test_setdefault(self): + self.assertEqual(self.session.setdefault("foo", "bar"), "bar") + self.assertEqual(self.session.setdefault("foo", "baz"), "bar") + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, True) + + def test_update(self): + self.session.update({"update key": 1}) + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, True) + self.assertEqual(self.session.get("update key", None), 1) + + def test_has_key(self): + self.session["some key"] = 1 + self.session.modified = False + self.session.accessed = False + self.assertIn("some key", self.session) + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, False) + + def test_values(self): + self.assertEqual(list(self.session.values()), []) + self.assertIs(self.session.accessed, True) + self.session["some key"] = 1 + self.session.modified = False + self.session.accessed = False + self.assertEqual(list(self.session.values()), [1]) + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, False) + + def test_keys(self): + self.session["x"] = 1 + self.session.modified = False + self.session.accessed = False + self.assertEqual(list(self.session.keys()), ["x"]) + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, False) + + def test_items(self): + self.session["x"] = 1 + self.session.modified = False + self.session.accessed = False + self.assertEqual(list(self.session.items()), [("x", 1)]) + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, False) + + def test_clear(self): + self.session["x"] = 1 + self.session.modified = False + self.session.accessed = False + self.assertEqual(list(self.session.items()), [("x", 1)]) + self.session.clear() + 
self.assertEqual(list(self.session.items()), []) + self.assertIs(self.session.accessed, True) + self.assertIs(self.session.modified, True) + + def test_save(self): + self.session.save() + self.assertIs(self.session.exists(self.session.session_key), True) + + def test_delete(self): + self.session.save() + self.session.delete(self.session.session_key) + self.assertIs(self.session.exists(self.session.session_key), False) + + def test_flush(self): + self.session["foo"] = "bar" + self.session.save() + prev_key = self.session.session_key + self.session.flush() + self.assertIs(self.session.exists(prev_key), False) + self.assertNotEqual(self.session.session_key, prev_key) + self.assertIsNone(self.session.session_key) + self.assertIs(self.session.modified, True) + self.assertIs(self.session.accessed, True) + + def test_cycle(self): + self.session["a"], self.session["b"] = "c", "d" + self.session.save() + prev_key = self.session.session_key + prev_data = list(self.session.items()) + self.session.cycle_key() + self.assertIs(self.session.exists(prev_key), False) + self.assertNotEqual(self.session.session_key, prev_key) + self.assertEqual(list(self.session.items()), prev_data) + + def test_cycle_with_no_session_cache(self): + self.session["a"], self.session["b"] = "c", "d" + self.session.save() + prev_data = self.session.items() + self.session = self.backend(self.session.session_key) + self.assertIs(hasattr(self.session, "_session_cache"), False) + self.session.cycle_key() + self.assertCountEqual(self.session.items(), prev_data) + + def test_save_doesnt_clear_data(self): + self.session["a"] = "b" + self.session.save() + self.assertEqual(self.session["a"], "b") + + def test_invalid_key(self): + # Submitting an invalid session key (either by guessing, or if the db has + # removed the key) results in a new key being generated. 
+ try: + session = self.backend("1") + session.save() + self.assertNotEqual(session.session_key, "1") + self.assertIsNone(session.get("cat")) + session.delete() + finally: + # Some backends leave a stale cache entry for the invalid + # session key; make sure that entry is manually deleted + session.delete("1") + + def test_session_key_empty_string_invalid(self): + """Falsey values (Such as an empty string) are rejected.""" + self.session._session_key = "" + self.assertIsNone(self.session.session_key) + + def test_session_key_too_short_invalid(self): + """Strings shorter than 8 characters are rejected.""" + self.session._session_key = "1234567" + self.assertIsNone(self.session.session_key) + + def test_session_key_valid_string_saved(self): + """Strings of length 8 and up are accepted and stored.""" + self.session._session_key = "12345678" + self.assertEqual(self.session.session_key, "12345678") + + def test_session_key_is_read_only(self): + def set_session_key(session): + session.session_key = session._get_new_session_key() + + with self.assertRaises(AttributeError): + set_session_key(self.session) + + # Custom session expiry + def test_default_expiry(self): + # A normal session has a max age equal to settings + self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) + + # So does a custom session with an idle expiration time of 0 (but it'll + # expire at browser close) + self.session.set_expiry(0) + self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) + + def test_custom_expiry_seconds(self): + modification = timezone.now() + + self.session.set_expiry(10) + + date = self.session.get_expiry_date(modification=modification) + self.assertEqual(date, modification + timedelta(seconds=10)) + + age = self.session.get_expiry_age(modification=modification) + self.assertEqual(age, 10) + + def test_custom_expiry_timedelta(self): + modification = timezone.now() + + # Mock timezone.now, because set_expiry calls it on this code path. 
+ original_now = timezone.now + try: + timezone.now = lambda: modification + self.session.set_expiry(timedelta(seconds=10)) + finally: + timezone.now = original_now + + date = self.session.get_expiry_date(modification=modification) + self.assertEqual(date, modification + timedelta(seconds=10)) + + age = self.session.get_expiry_age(modification=modification) + self.assertEqual(age, 10) + + def test_custom_expiry_datetime(self): + modification = timezone.now() + + self.session.set_expiry(modification + timedelta(seconds=10)) + + date = self.session.get_expiry_date(modification=modification) + self.assertEqual(date, modification + timedelta(seconds=10)) + + age = self.session.get_expiry_age(modification=modification) + self.assertEqual(age, 10) + + def test_custom_expiry_reset(self): + self.session.set_expiry(None) + self.session.set_expiry(10) + self.session.set_expiry(None) + self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) + + def test_get_expire_at_browser_close(self): + # Tests get_expire_at_browser_close with different settings and different + # set_expiry calls + with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False): + self.session.set_expiry(10) + self.assertIs(self.session.get_expire_at_browser_close(), False) + + self.session.set_expiry(0) + self.assertIs(self.session.get_expire_at_browser_close(), True) + + self.session.set_expiry(None) + self.assertIs(self.session.get_expire_at_browser_close(), False) + + with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True): + self.session.set_expiry(10) + self.assertIs(self.session.get_expire_at_browser_close(), False) + + self.session.set_expiry(0) + self.assertIs(self.session.get_expire_at_browser_close(), True) + + self.session.set_expiry(None) + self.assertIs(self.session.get_expire_at_browser_close(), True) + + def test_decode(self): + # Ensure we can decode what we encode + data = {"a test key": "a test value"} + encoded = self.session.encode(data) + 
self.assertEqual(self.session.decode(encoded), data) + + def test_decode_failure_logged_to_security(self): + tests = [ + base64.b64encode(b"flaskdj:alkdjf").decode("ascii"), + "bad:encoded:value", + ] + for encoded in tests: + with self.subTest(encoded=encoded): + with self.assertLogs( + "django.security.SuspiciousSession", "WARNING" + ) as cm: + self.assertEqual(self.session.decode(encoded), {}) + # The failed decode is logged. + self.assertIn("Session data corrupted", cm.output[0]) + + def test_decode_serializer_exception(self): + signer = TimestampSigner(salt=self.session.key_salt) + encoded = signer.sign(b"invalid data") + self.assertEqual(self.session.decode(encoded), {}) + + def test_actual_expiry(self): + old_session_key = None + new_session_key = None + try: + self.session["foo"] = "bar" + self.session.set_expiry(-timedelta(seconds=10)) + self.session.save() + old_session_key = self.session.session_key + # With an expiry date in the past, the session expires instantly. + new_session = self.backend(self.session.session_key) + new_session_key = new_session.session_key + self.assertNotIn("foo", new_session) + finally: + self.session.delete(old_session_key) + self.session.delete(new_session_key) + + def test_session_load_does_not_create_record(self): + """ + Loading an unknown session key does not create a session record. + + Creating session records on load is a DOS vulnerability. + """ + session = self.backend("someunknownkey") + session.load() + + self.assertIsNone(session.session_key) + self.assertIs(session.exists(session.session_key), False) + # provided unknown key was cycled, not reused + self.assertNotEqual(session.session_key, "someunknownkey") + + def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self): + """ + Sessions shouldn't be resurrected by a concurrent request. + """ + # Create new session. + s1 = self.backend() + s1["test_data"] = "value1" + s1.save(must_create=True) + + # Logout in another context. 
+ s2 = self.backend(s1.session_key) + s2.delete() + + # Modify session in first context. + s1["test_data"] = "value2" + with self.assertRaises(UpdateError): + # This should throw an exception as the session is deleted, not + # resurrect the session. + s1.save() + + self.assertEqual(s1.load(), {}) + + +class DatabaseSessionTests(SessionTestsMixin, TestCase): + backend = DatabaseSession + session_engine = "django.contrib.sessions.backends.db" + + @property + def model(self): + return self.backend.get_model_class() + + def test_session_str(self): + "Session repr should be the session key." + self.session["x"] = 1 + self.session.save() + + session_key = self.session.session_key + s = self.model.objects.get(session_key=session_key) + + self.assertEqual(str(s), session_key) + + def test_session_get_decoded(self): + """ + Test we can use Session.get_decoded to retrieve data stored + in normal way + """ + self.session["x"] = 1 + self.session.save() + + s = self.model.objects.get(session_key=self.session.session_key) + + self.assertEqual(s.get_decoded(), {"x": 1}) + + def test_sessionmanager_save(self): + """ + Test SessionManager.save method + """ + # Create a session + self.session["y"] = 1 + self.session.save() + + s = self.model.objects.get(session_key=self.session.session_key) + # Change it + self.model.objects.save(s.session_key, {"y": 2}, s.expire_date) + # Clear cache, so that it will be retrieved from DB + del self.session._session_cache + self.assertEqual(self.session["y"], 2) + + def test_clearsessions_command(self): + """ + Test clearsessions command for clearing expired sessions. + """ + self.assertEqual(0, self.model.objects.count()) + + # One object in the future + self.session["foo"] = "bar" + self.session.set_expiry(3600) + self.session.save() + + # One object in the past + other_session = self.backend() + other_session["foo"] = "bar" + other_session.set_expiry(-3600) + other_session.save() + + # Two sessions are in the database before clearsessions... 
+ self.assertEqual(2, self.model.objects.count()) + with override_settings(SESSION_ENGINE=self.session_engine): + management.call_command("clearsessions") + # ... and one is deleted. + self.assertEqual(1, self.model.objects.count()) + + +@override_settings(USE_TZ=True) +class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests): + pass + + +class CustomDatabaseSessionTests(DatabaseSessionTests): + backend = CustomDatabaseSession + session_engine = "sessions_tests.models" + custom_session_cookie_age = 60 * 60 * 24 # One day. + + def test_extra_session_field(self): + # Set the account ID to be picked up by a custom session storage + # and saved to a custom session model database column. + self.session["_auth_user_id"] = 42 + self.session.save() + + # Make sure that the customized create_model_instance() was called. + s = self.model.objects.get(session_key=self.session.session_key) + self.assertEqual(s.account_id, 42) + + # Make the session "anonymous". + self.session.pop("_auth_user_id") + self.session.save() + + # Make sure that save() on an existing session did the right job. 
+ s = self.model.objects.get(session_key=self.session.session_key) + self.assertIsNone(s.account_id) + + def test_custom_expiry_reset(self): + self.session.set_expiry(None) + self.session.set_expiry(10) + self.session.set_expiry(None) + self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age) + + def test_default_expiry(self): + self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age) + self.session.set_expiry(0) + self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age) + + +class CacheDBSessionTests(SessionTestsMixin, TestCase): + backend = CacheDBSession + + def test_exists_searches_cache_first(self): + self.session.save() + with self.assertNumQueries(0): + self.assertIs(self.session.exists(self.session.session_key), True) + + # Some backends might issue a warning + @ignore_warnings(module="django.core.cache.backends.base") + def test_load_overlong_key(self): + self.session._session_key = (string.ascii_letters + string.digits) * 20 + self.assertEqual(self.session.load(), {}) + + @override_settings(SESSION_CACHE_ALIAS="sessions") + def test_non_default_cache(self): + # 21000 - CacheDB backend should respect SESSION_CACHE_ALIAS. + with self.assertRaises(InvalidCacheBackendError): + self.backend() + + +@override_settings(USE_TZ=True) +class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests): + pass + + +class FileSessionTests(SessionTestsMixin, SimpleTestCase): + backend = FileSession + + def setUp(self): + # Do file session tests in an isolated directory, and kill it after we're done. 
+ self.original_session_file_path = settings.SESSION_FILE_PATH + self.temp_session_store = settings.SESSION_FILE_PATH = self.mkdtemp() + # Reset the file session backend's internal caches + if hasattr(self.backend, "_storage_path"): + del self.backend._storage_path + super().setUp() + + def tearDown(self): + super().tearDown() + settings.SESSION_FILE_PATH = self.original_session_file_path + shutil.rmtree(self.temp_session_store) + + def mkdtemp(self): + return tempfile.mkdtemp() + + @override_settings( + SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer", + ) + def test_configuration_check(self): + del self.backend._storage_path + # Make sure the file backend checks for a good storage dir + with self.assertRaises(ImproperlyConfigured): + self.backend() + + def test_invalid_key_backslash(self): + # Ensure we don't allow directory-traversal. + # This is tested directly on _key_to_file, as load() will swallow + # a SuspiciousOperation in the same way as an OSError - by creating + # a new session, making it unclear whether the slashes were detected. + with self.assertRaises(InvalidSessionKey): + self.backend()._key_to_file("a\\b\\c") + + def test_invalid_key_forwardslash(self): + # Ensure we don't allow directory-traversal + with self.assertRaises(InvalidSessionKey): + self.backend()._key_to_file("a/b/c") + + @override_settings( + SESSION_ENGINE="django.contrib.sessions.backends.file", + SESSION_COOKIE_AGE=0, + ) + def test_clearsessions_command(self): + """ + Test clearsessions command for clearing expired sessions. 
+ """ + storage_path = self.backend._get_storage_path() + file_prefix = settings.SESSION_COOKIE_NAME + + def count_sessions(): + return len( + [ + session_file + for session_file in os.listdir(storage_path) + if session_file.startswith(file_prefix) + ] + ) + + self.assertEqual(0, count_sessions()) + + # One object in the future + self.session["foo"] = "bar" + self.session.set_expiry(3600) + self.session.save() + + # One object in the past + other_session = self.backend() + other_session["foo"] = "bar" + other_session.set_expiry(-3600) + other_session.save() + + # One object in the present without an expiry (should be deleted since + # its modification time + SESSION_COOKIE_AGE will be in the past when + # clearsessions runs). + other_session2 = self.backend() + other_session2["foo"] = "bar" + other_session2.save() + + # Three sessions are in the filesystem before clearsessions... + self.assertEqual(3, count_sessions()) + management.call_command("clearsessions") + # ... and two are deleted. 
+ self.assertEqual(1, count_sessions()) + + +class FileSessionPathLibTests(FileSessionTests): + def mkdtemp(self): + tmp_dir = super().mkdtemp() + return Path(tmp_dir) + + +class CacheSessionTests(SessionTestsMixin, SimpleTestCase): + backend = CacheSession + + # Some backends might issue a warning + @ignore_warnings(module="django.core.cache.backends.base") + def test_load_overlong_key(self): + self.session._session_key = (string.ascii_letters + string.digits) * 20 + self.assertEqual(self.session.load(), {}) + + def test_default_cache(self): + self.session.save() + self.assertIsNotNone(caches["default"].get(self.session.cache_key)) + + @override_settings( + CACHES={ + "default": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + }, + "sessions": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "session", + }, + }, + SESSION_CACHE_ALIAS="sessions", + ) + def test_non_default_cache(self): + # Re-initialize the session backend to make use of overridden settings. 
+ self.session = self.backend() + + self.session.save() + self.assertIsNone(caches["default"].get(self.session.cache_key)) + self.assertIsNotNone(caches["sessions"].get(self.session.cache_key)) + + def test_create_and_save(self): + self.session = self.backend() + self.session.create() + self.session.save() + self.assertIsNotNone(caches["default"].get(self.session.cache_key)) + + +class SessionMiddlewareTests(TestCase): + request_factory = RequestFactory() + + @staticmethod + def get_response_touching_session(request): + request.session["hello"] = "world" + return HttpResponse("Session test") + + @override_settings(SESSION_COOKIE_SECURE=True) + def test_secure_session_cookie(self): + request = self.request_factory.get("/") + middleware = SessionMiddleware(self.get_response_touching_session) + + # Handle the response through the middleware + response = middleware(request) + self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]["secure"], True) + + @override_settings(SESSION_COOKIE_HTTPONLY=True) + def test_httponly_session_cookie(self): + request = self.request_factory.get("/") + middleware = SessionMiddleware(self.get_response_touching_session) + + # Handle the response through the middleware + response = middleware(request) + self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]["httponly"], True) + self.assertIn( + cookies.Morsel._reserved["httponly"], + str(response.cookies[settings.SESSION_COOKIE_NAME]), + ) + + @override_settings(SESSION_COOKIE_SAMESITE="Strict") + def test_samesite_session_cookie(self): + request = self.request_factory.get("/") + middleware = SessionMiddleware(self.get_response_touching_session) + response = middleware(request) + self.assertEqual( + response.cookies[settings.SESSION_COOKIE_NAME]["samesite"], "Strict" + ) + + @override_settings(SESSION_COOKIE_HTTPONLY=False) + def test_no_httponly_session_cookie(self): + request = self.request_factory.get("/") + middleware = SessionMiddleware(self.get_response_touching_session) + 
response = middleware(request) + self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]["httponly"], "") + self.assertNotIn( + cookies.Morsel._reserved["httponly"], + str(response.cookies[settings.SESSION_COOKIE_NAME]), + ) + + def test_session_save_on_500(self): + def response_500(request): + response = HttpResponse("Horrible error") + response.status_code = 500 + request.session["hello"] = "world" + return response + + request = self.request_factory.get("/") + SessionMiddleware(response_500)(request) + + # The value wasn't saved above. + self.assertNotIn("hello", request.session.load()) + + def test_session_save_on_5xx(self): + def response_503(request): + response = HttpResponse("Service Unavailable") + response.status_code = 503 + request.session["hello"] = "world" + return response + + request = self.request_factory.get("/") + SessionMiddleware(response_503)(request) + + # The value wasn't saved above. + self.assertNotIn("hello", request.session.load()) + + def test_session_update_error_redirect(self): + def response_delete_session(request): + request.session = DatabaseSession() + request.session.save(must_create=True) + request.session.delete() + return HttpResponse() + + request = self.request_factory.get("/foo/") + middleware = SessionMiddleware(response_delete_session) + + msg = ( + "The request's session was deleted before the request completed. " + "The user may have logged out in a concurrent request, for example." + ) + with self.assertRaisesMessage(SessionInterrupted, msg): + # Handle the response through the middleware. It will try to save + # the deleted session which will cause an UpdateError that's caught + # and raised as a SessionInterrupted. 
+ middleware(request) + + def test_session_delete_on_end(self): + def response_ending_session(request): + request.session.flush() + return HttpResponse("Session test") + + request = self.request_factory.get("/") + middleware = SessionMiddleware(response_ending_session) + + # Before deleting, there has to be an existing cookie + request.COOKIES[settings.SESSION_COOKIE_NAME] = "abc" + + # Handle the response through the middleware + response = middleware(request) + + # The cookie was deleted, not recreated. + # A deleted cookie header looks like: + # "Set-Cookie: sessionid=; expires=Thu, 01 Jan 1970 00:00:00 GMT; " + # "Max-Age=0; Path=/" + self.assertEqual( + 'Set-Cookie: {}=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; ' + "Max-Age=0; Path=/; SameSite={}".format( + settings.SESSION_COOKIE_NAME, + settings.SESSION_COOKIE_SAMESITE, + ), + str(response.cookies[settings.SESSION_COOKIE_NAME]), + ) + # SessionMiddleware sets 'Vary: Cookie' to prevent the 'Set-Cookie' + # from being cached. + self.assertEqual(response.headers["Vary"], "Cookie") + + @override_settings( + SESSION_COOKIE_DOMAIN=".example.local", SESSION_COOKIE_PATH="/example/" + ) + def test_session_delete_on_end_with_custom_domain_and_path(self): + def response_ending_session(request): + request.session.flush() + return HttpResponse("Session test") + + request = self.request_factory.get("/") + middleware = SessionMiddleware(response_ending_session) + + # Before deleting, there has to be an existing cookie + request.COOKIES[settings.SESSION_COOKIE_NAME] = "abc" + + # Handle the response through the middleware + response = middleware(request) + + # The cookie was deleted, not recreated. 
+ # A deleted cookie header with a custom domain and path looks like: + # Set-Cookie: sessionid=; Domain=.example.local; + # expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; + # Path=/example/ + self.assertEqual( + 'Set-Cookie: {}=""; Domain=.example.local; expires=Thu, ' + "01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/example/; SameSite={}".format( + settings.SESSION_COOKIE_NAME, + settings.SESSION_COOKIE_SAMESITE, + ), + str(response.cookies[settings.SESSION_COOKIE_NAME]), + ) + + def test_flush_empty_without_session_cookie_doesnt_set_cookie(self): + def response_ending_session(request): + request.session.flush() + return HttpResponse("Session test") + + request = self.request_factory.get("/") + middleware = SessionMiddleware(response_ending_session) + + # Handle the response through the middleware + response = middleware(request) + + # A cookie should not be set. + self.assertEqual(response.cookies, {}) + # The session is accessed so "Vary: Cookie" should be set. + self.assertEqual(response.headers["Vary"], "Cookie") + + def test_empty_session_saved(self): + """ + If a session is emptied of data but still has a key, it should still + be updated. + """ + + def response_set_session(request): + # Set a session key and some data. + request.session["foo"] = "bar" + return HttpResponse("Session test") + + request = self.request_factory.get("/") + middleware = SessionMiddleware(response_set_session) + + # Handle the response through the middleware. + response = middleware(request) + self.assertEqual(tuple(request.session.items()), (("foo", "bar"),)) + # A cookie should be set, along with Vary: Cookie. + self.assertIn( + "Set-Cookie: sessionid=%s" % request.session.session_key, + str(response.cookies), + ) + self.assertEqual(response.headers["Vary"], "Cookie") + + # Empty the session data. + del request.session["foo"] + # Handle the response through the middleware. 
+ response = HttpResponse("Session test") + response = middleware.process_response(request, response) + self.assertEqual(dict(request.session.values()), {}) + session = Session.objects.get(session_key=request.session.session_key) + self.assertEqual(session.get_decoded(), {}) + # While the session is empty, it hasn't been flushed so a cookie should + # still be set, along with Vary: Cookie. + self.assertGreater(len(request.session.session_key), 8) + self.assertIn( + "Set-Cookie: sessionid=%s" % request.session.session_key, + str(response.cookies), + ) + self.assertEqual(response.headers["Vary"], "Cookie") + + +class CookieSessionTests(SessionTestsMixin, SimpleTestCase): + backend = CookieSession + + def test_save(self): + """ + This test tested exists() in the other session backends, but that + doesn't make sense for us. + """ + pass + + def test_cycle(self): + """ + This test tested cycle_key() which would create a new session + key for the same session data. But we can't invalidate previously + signed cookies (other than letting them expire naturally) so + testing for this behavior is meaningless. + """ + pass + + @unittest.expectedFailure + def test_actual_expiry(self): + # The cookie backend doesn't handle non-default expiry dates, see #19201 + super().test_actual_expiry() + + def test_unpickling_exception(self): + # signed_cookies backend should handle unpickle exceptions gracefully + # by creating a new session + self.assertEqual(self.session.serializer, JSONSerializer) + self.session.save() + with mock.patch("django.core.signing.loads", side_effect=ValueError): + self.session.load() + + @unittest.skip( + "Cookie backend doesn't have an external store to create records in." + ) + def test_session_load_does_not_create_record(self): + pass + + @unittest.skip( + "CookieSession is stored in the client and there is no way to query it." 
+ ) + def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self): + pass + + +class ClearSessionsCommandTests(SimpleTestCase): + def test_clearsessions_unsupported(self): + msg = ( + "Session engine 'sessions_tests.no_clear_expired' doesn't " + "support clearing expired sessions." + ) + with self.settings(SESSION_ENGINE="sessions_tests.no_clear_expired"): + with self.assertRaisesMessage(management.CommandError, msg): + management.call_command("clearsessions") diff --git a/testbed/django__django/tests/settings_tests/__init__.py b/testbed/django__django/tests/settings_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/settings_tests/tests.py b/testbed/django__django/tests/settings_tests/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..b2044878c98f82fcb2325c1629c9c9cd4ac81864 --- /dev/null +++ b/testbed/django__django/tests/settings_tests/tests.py @@ -0,0 +1,655 @@ +import os +import sys +import unittest +from types import ModuleType, SimpleNamespace +from unittest import mock + +from django.conf import ENVIRONMENT_VARIABLE, LazySettings, Settings, settings +from django.core.exceptions import ImproperlyConfigured +from django.http import HttpRequest +from django.test import ( + SimpleTestCase, + TestCase, + TransactionTestCase, + modify_settings, + override_settings, + signals, +) +from django.test.utils import requires_tz_support +from django.urls import clear_script_prefix, set_script_prefix + + +@modify_settings(ITEMS={"prepend": ["b"], "append": ["d"], "remove": ["a", "e"]}) +@override_settings( + ITEMS=["a", "c", "e"], ITEMS_OUTER=[1, 2, 3], TEST="override", TEST_OUTER="outer" +) +class FullyDecoratedTranTestCase(TransactionTestCase): + available_apps = [] + + def test_override(self): + self.assertEqual(settings.ITEMS, ["b", "c", "d"]) + self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) + 
self.assertEqual(settings.TEST, "override") + self.assertEqual(settings.TEST_OUTER, "outer") + + @modify_settings( + ITEMS={ + "append": ["e", "f"], + "prepend": ["a"], + "remove": ["d", "c"], + } + ) + def test_method_list_override(self): + self.assertEqual(settings.ITEMS, ["a", "b", "e", "f"]) + self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) + + @modify_settings( + ITEMS={ + "append": ["b"], + "prepend": ["d"], + "remove": ["a", "c", "e"], + } + ) + def test_method_list_override_no_ops(self): + self.assertEqual(settings.ITEMS, ["b", "d"]) + + @modify_settings( + ITEMS={ + "append": "e", + "prepend": "a", + "remove": "c", + } + ) + def test_method_list_override_strings(self): + self.assertEqual(settings.ITEMS, ["a", "b", "d", "e"]) + + @modify_settings(ITEMS={"remove": ["b", "d"]}) + @modify_settings(ITEMS={"append": ["b"], "prepend": ["d"]}) + def test_method_list_override_nested_order(self): + self.assertEqual(settings.ITEMS, ["d", "c", "b"]) + + @override_settings(TEST="override2") + def test_method_override(self): + self.assertEqual(settings.TEST, "override2") + self.assertEqual(settings.TEST_OUTER, "outer") + + def test_decorated_testcase_name(self): + self.assertEqual( + FullyDecoratedTranTestCase.__name__, "FullyDecoratedTranTestCase" + ) + + def test_decorated_testcase_module(self): + self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__) + + +@modify_settings(ITEMS={"prepend": ["b"], "append": ["d"], "remove": ["a", "e"]}) +@override_settings(ITEMS=["a", "c", "e"], TEST="override") +class FullyDecoratedTestCase(TestCase): + def test_override(self): + self.assertEqual(settings.ITEMS, ["b", "c", "d"]) + self.assertEqual(settings.TEST, "override") + + @modify_settings( + ITEMS={ + "append": "e", + "prepend": "a", + "remove": "c", + } + ) + @override_settings(TEST="override2") + def test_method_override(self): + self.assertEqual(settings.ITEMS, ["a", "b", "d", "e"]) + self.assertEqual(settings.TEST, "override2") + + +class 
ClassDecoratedTestCaseSuper(TestCase): + """ + Dummy class for testing max recursion error in child class call to + super(). Refs #17011. + """ + + def test_max_recursion_error(self): + pass + + +@override_settings(TEST="override") +class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.foo = getattr(settings, "TEST", "BUG") + + def test_override(self): + self.assertEqual(settings.TEST, "override") + + def test_setupclass_override(self): + """Settings are overridden within setUpClass (#21281).""" + self.assertEqual(self.foo, "override") + + @override_settings(TEST="override2") + def test_method_override(self): + self.assertEqual(settings.TEST, "override2") + + def test_max_recursion_error(self): + """ + Overriding a method on a super class and then calling that method on + the super class should not trigger infinite recursion. See #17011. + """ + super().test_max_recursion_error() + + +@modify_settings(ITEMS={"append": "mother"}) +@override_settings(ITEMS=["father"], TEST="override-parent") +class ParentDecoratedTestCase(TestCase): + pass + + +@modify_settings(ITEMS={"append": ["child"]}) +@override_settings(TEST="override-child") +class ChildDecoratedTestCase(ParentDecoratedTestCase): + def test_override_settings_inheritance(self): + self.assertEqual(settings.ITEMS, ["father", "mother", "child"]) + self.assertEqual(settings.TEST, "override-child") + + +class SettingsTests(SimpleTestCase): + def setUp(self): + self.testvalue = None + signals.setting_changed.connect(self.signal_callback) + + def tearDown(self): + signals.setting_changed.disconnect(self.signal_callback) + + def signal_callback(self, sender, setting, value, **kwargs): + if setting == "TEST": + self.testvalue = value + + def test_override(self): + settings.TEST = "test" + self.assertEqual("test", settings.TEST) + with self.settings(TEST="override"): + self.assertEqual("override", settings.TEST) + self.assertEqual("test", 
settings.TEST) + del settings.TEST + + def test_override_change(self): + settings.TEST = "test" + self.assertEqual("test", settings.TEST) + with self.settings(TEST="override"): + self.assertEqual("override", settings.TEST) + settings.TEST = "test2" + self.assertEqual("test", settings.TEST) + del settings.TEST + + def test_override_doesnt_leak(self): + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + with self.settings(TEST="override"): + self.assertEqual("override", settings.TEST) + settings.TEST = "test" + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + + @override_settings(TEST="override") + def test_decorator(self): + self.assertEqual("override", settings.TEST) + + def test_context_manager(self): + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + override = override_settings(TEST="override") + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + override.enable() + self.assertEqual("override", settings.TEST) + override.disable() + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + + def test_class_decorator(self): + # SimpleTestCase can be decorated by override_settings, but not ut.TestCase + class SimpleTestCaseSubclass(SimpleTestCase): + pass + + class UnittestTestCaseSubclass(unittest.TestCase): + pass + + decorated = override_settings(TEST="override")(SimpleTestCaseSubclass) + self.assertIsInstance(decorated, type) + self.assertTrue(issubclass(decorated, SimpleTestCase)) + + with self.assertRaisesMessage( + Exception, "Only subclasses of Django SimpleTestCase" + ): + decorated = override_settings(TEST="override")(UnittestTestCaseSubclass) + + def test_signal_callback_context_manager(self): + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + with self.settings(TEST="override"): + self.assertEqual(self.testvalue, "override") + self.assertIsNone(self.testvalue) + + @override_settings(TEST="override") + def test_signal_callback_decorator(self): 
+ self.assertEqual(self.testvalue, "override") + + # + # Regression tests for #10130: deleting settings. + # + + def test_settings_delete(self): + settings.TEST = "test" + self.assertEqual("test", settings.TEST) + del settings.TEST + msg = "'Settings' object has no attribute 'TEST'" + with self.assertRaisesMessage(AttributeError, msg): + getattr(settings, "TEST") + + def test_settings_delete_wrapped(self): + with self.assertRaisesMessage(TypeError, "can't delete _wrapped."): + delattr(settings, "_wrapped") + + def test_override_settings_delete(self): + """ + Allow deletion of a setting in an overridden settings set (#18824) + """ + previous_i18n = settings.USE_I18N + previous_tz = settings.USE_TZ + with self.settings(USE_I18N=False): + del settings.USE_I18N + with self.assertRaises(AttributeError): + getattr(settings, "USE_I18N") + # Should also work for a non-overridden setting + del settings.USE_TZ + with self.assertRaises(AttributeError): + getattr(settings, "USE_TZ") + self.assertNotIn("USE_I18N", dir(settings)) + self.assertNotIn("USE_TZ", dir(settings)) + self.assertEqual(settings.USE_I18N, previous_i18n) + self.assertEqual(settings.USE_TZ, previous_tz) + + def test_override_settings_nested(self): + """ + override_settings uses the actual _wrapped attribute at + runtime, not when it was instantiated. 
+ """ + + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + with self.assertRaises(AttributeError): + getattr(settings, "TEST2") + + inner = override_settings(TEST2="override") + with override_settings(TEST="override"): + self.assertEqual("override", settings.TEST) + with inner: + self.assertEqual("override", settings.TEST) + self.assertEqual("override", settings.TEST2) + # inner's __exit__ should have restored the settings of the outer + # context manager, not those when the class was instantiated + self.assertEqual("override", settings.TEST) + with self.assertRaises(AttributeError): + getattr(settings, "TEST2") + + with self.assertRaises(AttributeError): + getattr(settings, "TEST") + with self.assertRaises(AttributeError): + getattr(settings, "TEST2") + + @override_settings(SECRET_KEY="") + def test_no_secret_key(self): + msg = "The SECRET_KEY setting must not be empty." + with self.assertRaisesMessage(ImproperlyConfigured, msg): + settings.SECRET_KEY + + def test_no_settings_module(self): + msg = ( + "Requested setting%s, but settings are not configured. You " + "must either define the environment variable DJANGO_SETTINGS_MODULE " + "or call settings.configure() before accessing settings." 
+ ) + orig_settings = os.environ[ENVIRONMENT_VARIABLE] + os.environ[ENVIRONMENT_VARIABLE] = "" + try: + with self.assertRaisesMessage(ImproperlyConfigured, msg % "s"): + settings._setup() + with self.assertRaisesMessage(ImproperlyConfigured, msg % " TEST"): + settings._setup("TEST") + finally: + os.environ[ENVIRONMENT_VARIABLE] = orig_settings + + def test_already_configured(self): + with self.assertRaisesMessage(RuntimeError, "Settings already configured."): + settings.configure() + + def test_nonupper_settings_prohibited_in_configure(self): + s = LazySettings() + with self.assertRaisesMessage(TypeError, "Setting 'foo' must be uppercase."): + s.configure(foo="bar") + + def test_nonupper_settings_ignored_in_default_settings(self): + s = LazySettings() + s.configure(SimpleNamespace(foo="bar")) + with self.assertRaises(AttributeError): + getattr(s, "foo") + + @requires_tz_support + @mock.patch("django.conf.global_settings.TIME_ZONE", "test") + def test_incorrect_timezone(self): + with self.assertRaisesMessage(ValueError, "Incorrect timezone setting: test"): + settings._setup() + + +class TestComplexSettingOverride(SimpleTestCase): + def setUp(self): + self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy() + signals.COMPLEX_OVERRIDE_SETTINGS.add("TEST_WARN") + + def tearDown(self): + signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings + self.assertNotIn("TEST_WARN", signals.COMPLEX_OVERRIDE_SETTINGS) + + def test_complex_override_warning(self): + """Regression test for #19031""" + msg = "Overriding setting TEST_WARN can lead to unexpected behavior." 
+ with self.assertWarnsMessage(UserWarning, msg) as cm: + with override_settings(TEST_WARN="override"): + self.assertEqual(settings.TEST_WARN, "override") + self.assertEqual(cm.filename, __file__) + + +class SecureProxySslHeaderTest(SimpleTestCase): + @override_settings(SECURE_PROXY_SSL_HEADER=None) + def test_none(self): + req = HttpRequest() + self.assertIs(req.is_secure(), False) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def test_set_without_xheader(self): + req = HttpRequest() + self.assertIs(req.is_secure(), False) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def test_set_with_xheader_wrong(self): + req = HttpRequest() + req.META["HTTP_X_FORWARDED_PROTO"] = "wrongvalue" + self.assertIs(req.is_secure(), False) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def test_set_with_xheader_right(self): + req = HttpRequest() + req.META["HTTP_X_FORWARDED_PROTO"] = "https" + self.assertIs(req.is_secure(), True) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def test_set_with_xheader_leftmost_right(self): + req = HttpRequest() + req.META["HTTP_X_FORWARDED_PROTO"] = "https, http" + self.assertIs(req.is_secure(), True) + req.META["HTTP_X_FORWARDED_PROTO"] = "https , http" + self.assertIs(req.is_secure(), True) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def test_set_with_xheader_leftmost_not_secure(self): + req = HttpRequest() + req.META["HTTP_X_FORWARDED_PROTO"] = "http, https" + self.assertIs(req.is_secure(), False) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def test_set_with_xheader_multiple_not_secure(self): + req = HttpRequest() + req.META["HTTP_X_FORWARDED_PROTO"] = "http ,wrongvalue,http,http" + self.assertIs(req.is_secure(), False) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def 
test_xheader_preferred_to_underlying_request(self): + class ProxyRequest(HttpRequest): + def _get_scheme(self): + """Proxy always connecting via HTTPS""" + return "https" + + # Client connects via HTTP. + req = ProxyRequest() + req.META["HTTP_X_FORWARDED_PROTO"] = "http" + self.assertIs(req.is_secure(), False) + + +class IsOverriddenTest(SimpleTestCase): + def test_configure(self): + s = LazySettings() + s.configure(SECRET_KEY="foo") + + self.assertTrue(s.is_overridden("SECRET_KEY")) + + def test_module(self): + settings_module = ModuleType("fake_settings_module") + settings_module.SECRET_KEY = "foo" + settings_module.USE_TZ = False + sys.modules["fake_settings_module"] = settings_module + try: + s = Settings("fake_settings_module") + + self.assertTrue(s.is_overridden("SECRET_KEY")) + self.assertFalse(s.is_overridden("ALLOWED_HOSTS")) + finally: + del sys.modules["fake_settings_module"] + + def test_override(self): + self.assertFalse(settings.is_overridden("ALLOWED_HOSTS")) + with override_settings(ALLOWED_HOSTS=[]): + self.assertTrue(settings.is_overridden("ALLOWED_HOSTS")) + + def test_unevaluated_lazysettings_repr(self): + lazy_settings = LazySettings() + expected = "" + self.assertEqual(repr(lazy_settings), expected) + + def test_evaluated_lazysettings_repr(self): + lazy_settings = LazySettings() + module = os.environ.get(ENVIRONMENT_VARIABLE) + expected = '' % module + # Force evaluation of the lazy object. 
+ lazy_settings.APPEND_SLASH + self.assertEqual(repr(lazy_settings), expected) + + def test_usersettingsholder_repr(self): + lazy_settings = LazySettings() + lazy_settings.configure(APPEND_SLASH=False) + expected = "" + self.assertEqual(repr(lazy_settings._wrapped), expected) + + def test_settings_repr(self): + module = os.environ.get(ENVIRONMENT_VARIABLE) + lazy_settings = Settings(module) + expected = '' % module + self.assertEqual(repr(lazy_settings), expected) + + +class TestListSettings(SimpleTestCase): + """ + Make sure settings that should be lists or tuples throw + ImproperlyConfigured if they are set to a string instead of a list or tuple. + """ + + list_or_tuple_settings = ( + "ALLOWED_HOSTS", + "INSTALLED_APPS", + "TEMPLATE_DIRS", + "LOCALE_PATHS", + "SECRET_KEY_FALLBACKS", + ) + + def test_tuple_settings(self): + settings_module = ModuleType("fake_settings_module") + settings_module.SECRET_KEY = "foo" + msg = "The %s setting must be a list or a tuple." + for setting in self.list_or_tuple_settings: + setattr(settings_module, setting, ("non_list_or_tuple_value")) + sys.modules["fake_settings_module"] = settings_module + try: + with self.assertRaisesMessage(ImproperlyConfigured, msg % setting): + Settings("fake_settings_module") + finally: + del sys.modules["fake_settings_module"] + delattr(settings_module, setting) + + +class SettingChangeEnterException(Exception): + pass + + +class SettingChangeExitException(Exception): + pass + + +class OverrideSettingsIsolationOnExceptionTests(SimpleTestCase): + """ + The override_settings context manager restore settings if one of the + receivers of "setting_changed" signal fails. Check the three cases of + receiver failure detailed in receiver(). In each case, ALL receivers are + called when exiting the context manager. 
+ """ + + def setUp(self): + signals.setting_changed.connect(self.receiver) + self.addCleanup(signals.setting_changed.disconnect, self.receiver) + # Create a spy that's connected to the `setting_changed` signal and + # executed AFTER `self.receiver`. + self.spy_receiver = mock.Mock() + signals.setting_changed.connect(self.spy_receiver) + self.addCleanup(signals.setting_changed.disconnect, self.spy_receiver) + + def receiver(self, **kwargs): + """ + A receiver that fails while certain settings are being changed. + - SETTING_BOTH raises an error while receiving the signal + on both entering and exiting the context manager. + - SETTING_ENTER raises an error only on enter. + - SETTING_EXIT raises an error only on exit. + """ + setting = kwargs["setting"] + enter = kwargs["enter"] + if setting in ("SETTING_BOTH", "SETTING_ENTER") and enter: + raise SettingChangeEnterException + if setting in ("SETTING_BOTH", "SETTING_EXIT") and not enter: + raise SettingChangeExitException + + def check_settings(self): + """Assert that settings for these tests aren't present.""" + self.assertFalse(hasattr(settings, "SETTING_BOTH")) + self.assertFalse(hasattr(settings, "SETTING_ENTER")) + self.assertFalse(hasattr(settings, "SETTING_EXIT")) + self.assertFalse(hasattr(settings, "SETTING_PASS")) + + def check_spy_receiver_exit_calls(self, call_count): + """ + Assert that `self.spy_receiver` was called exactly `call_count` times + with the ``enter=False`` keyword argument. + """ + kwargs_with_exit = [ + kwargs + for args, kwargs in self.spy_receiver.call_args_list + if ("enter", False) in kwargs.items() + ] + self.assertEqual(len(kwargs_with_exit), call_count) + + def test_override_settings_both(self): + """Receiver fails on both enter and exit.""" + with self.assertRaises(SettingChangeEnterException): + with override_settings(SETTING_PASS="BOTH", SETTING_BOTH="BOTH"): + pass + + self.check_settings() + # Two settings were touched, so expect two calls of `spy_receiver`. 
+ self.check_spy_receiver_exit_calls(call_count=2) + + def test_override_settings_enter(self): + """Receiver fails on enter only.""" + with self.assertRaises(SettingChangeEnterException): + with override_settings(SETTING_PASS="ENTER", SETTING_ENTER="ENTER"): + pass + + self.check_settings() + # Two settings were touched, so expect two calls of `spy_receiver`. + self.check_spy_receiver_exit_calls(call_count=2) + + def test_override_settings_exit(self): + """Receiver fails on exit only.""" + with self.assertRaises(SettingChangeExitException): + with override_settings(SETTING_PASS="EXIT", SETTING_EXIT="EXIT"): + pass + + self.check_settings() + # Two settings were touched, so expect two calls of `spy_receiver`. + self.check_spy_receiver_exit_calls(call_count=2) + + def test_override_settings_reusable_on_enter(self): + """ + Error is raised correctly when reusing the same override_settings + instance. + """ + + @override_settings(SETTING_ENTER="ENTER") + def decorated_function(): + pass + + with self.assertRaises(SettingChangeEnterException): + decorated_function() + signals.setting_changed.disconnect(self.receiver) + # This call shouldn't raise any errors. + decorated_function() + + +class MediaURLStaticURLPrefixTest(SimpleTestCase): + def set_script_name(self, val): + clear_script_prefix() + if val is not None: + set_script_prefix(val) + + def test_not_prefixed(self): + # Don't add SCRIPT_NAME prefix to absolute paths, URLs, or None. 
+ tests = ( + "/path/", + "http://myhost.com/path/", + "http://myhost/path/", + "https://myhost/path/", + None, + ) + for setting in ("MEDIA_URL", "STATIC_URL"): + for path in tests: + new_settings = {setting: path} + with self.settings(**new_settings): + for script_name in ["/somesubpath", "/somesubpath/", "/", "", None]: + with self.subTest(script_name=script_name, **new_settings): + try: + self.set_script_name(script_name) + self.assertEqual(getattr(settings, setting), path) + finally: + clear_script_prefix() + + def test_add_script_name_prefix(self): + tests = ( + # Relative paths. + ("/somesubpath", "path/", "/somesubpath/path/"), + ("/somesubpath/", "path/", "/somesubpath/path/"), + ("/", "path/", "/path/"), + # Invalid URLs. + ( + "/somesubpath/", + "htp://myhost.com/path/", + "/somesubpath/htp://myhost.com/path/", + ), + # Blank settings. + ("/somesubpath/", "", "/somesubpath/"), + ) + for setting in ("MEDIA_URL", "STATIC_URL"): + for script_name, path, expected_path in tests: + new_settings = {setting: path} + with self.settings(**new_settings): + with self.subTest(script_name=script_name, **new_settings): + try: + self.set_script_name(script_name) + self.assertEqual(getattr(settings, setting), expected_path) + finally: + clear_script_prefix() diff --git a/testbed/django__django/tests/shell/__init__.py b/testbed/django__django/tests/shell/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/shell/tests.py b/testbed/django__django/tests/shell/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..1a5f22f032652f819cf71736f23c94b49ce0fe34 --- /dev/null +++ b/testbed/django__django/tests/shell/tests.py @@ -0,0 +1,96 @@ +import sys +import unittest +from unittest import mock + +from django import __version__ +from django.core.management import CommandError, call_command +from django.test import SimpleTestCase +from 
django.test.utils import captured_stdin, captured_stdout + + +class ShellCommandTestCase(SimpleTestCase): + script_globals = 'print("__name__" in globals())' + script_with_inline_function = ( + "import django\ndef f():\n print(django.__version__)\nf()" + ) + + def test_command_option(self): + with self.assertLogs("test", "INFO") as cm: + call_command( + "shell", + command=( + "import django; from logging import getLogger; " + 'getLogger("test").info(django.__version__)' + ), + ) + self.assertEqual(cm.records[0].getMessage(), __version__) + + def test_command_option_globals(self): + with captured_stdout() as stdout: + call_command("shell", command=self.script_globals) + self.assertEqual(stdout.getvalue().strip(), "True") + + def test_command_option_inline_function_call(self): + with captured_stdout() as stdout: + call_command("shell", command=self.script_with_inline_function) + self.assertEqual(stdout.getvalue().strip(), __version__) + + @unittest.skipIf( + sys.platform == "win32", "Windows select() doesn't support file descriptors." 
+ ) + @mock.patch("django.core.management.commands.shell.select") + def test_stdin_read(self, select): + with captured_stdin() as stdin, captured_stdout() as stdout: + stdin.write("print(100)\n") + stdin.seek(0) + call_command("shell") + self.assertEqual(stdout.getvalue().strip(), "100") + + @unittest.skipIf( + sys.platform == "win32", + "Windows select() doesn't support file descriptors.", + ) + @mock.patch("django.core.management.commands.shell.select") # [1] + def test_stdin_read_globals(self, select): + with captured_stdin() as stdin, captured_stdout() as stdout: + stdin.write(self.script_globals) + stdin.seek(0) + call_command("shell") + self.assertEqual(stdout.getvalue().strip(), "True") + + @unittest.skipIf( + sys.platform == "win32", + "Windows select() doesn't support file descriptors.", + ) + @mock.patch("django.core.management.commands.shell.select") # [1] + def test_stdin_read_inline_function_call(self, select): + with captured_stdin() as stdin, captured_stdout() as stdout: + stdin.write(self.script_with_inline_function) + stdin.seek(0) + call_command("shell") + self.assertEqual(stdout.getvalue().strip(), __version__) + + @mock.patch("django.core.management.commands.shell.select.select") # [1] + @mock.patch.dict("sys.modules", {"IPython": None}) + def test_shell_with_ipython_not_installed(self, select): + select.return_value = ([], [], []) + with self.assertRaisesMessage( + CommandError, "Couldn't import ipython interface." + ): + call_command("shell", interface="ipython") + + @mock.patch("django.core.management.commands.shell.select.select") # [1] + @mock.patch.dict("sys.modules", {"bpython": None}) + def test_shell_with_bpython_not_installed(self, select): + select.return_value = ([], [], []) + with self.assertRaisesMessage( + CommandError, "Couldn't import bpython interface." + ): + call_command("shell", interface="bpython") + + # [1] Patch select to prevent tests failing when when the test suite is run + # in parallel mode. 
The tests are run in a subprocess and the subprocess's + # stdin is closed and replaced by /dev/null. Reading from /dev/null always + # returns EOF and so select always shows that sys.stdin is ready to read. + # This causes problems because of the call to select.select() toward the + # end of shell's handle() method. diff --git a/testbed/django__django/tests/shortcuts/__init__.py b/testbed/django__django/tests/shortcuts/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/shortcuts/jinja2/shortcuts/using.html b/testbed/django__django/tests/shortcuts/jinja2/shortcuts/using.html new file mode 100644 index 0000000000000000000000000000000000000000..8ce973e958c3ed2f286da03958b4ae2abf4051f0 --- /dev/null +++ b/testbed/django__django/tests/shortcuts/jinja2/shortcuts/using.html @@ -0,0 +1 @@ +Jinja2 diff --git a/testbed/django__django/tests/shortcuts/templates/shortcuts/render_test.html b/testbed/django__django/tests/shortcuts/templates/shortcuts/render_test.html new file mode 100644 index 0000000000000000000000000000000000000000..c2bbd9aa79005f301e776bd95e69f12620c7eb92 --- /dev/null +++ b/testbed/django__django/tests/shortcuts/templates/shortcuts/render_test.html @@ -0,0 +1 @@ +{{ foo }}.{{ bar }}.{{ baz }}.{{ request.path }} diff --git a/testbed/django__django/tests/shortcuts/templates/shortcuts/using.html b/testbed/django__django/tests/shortcuts/templates/shortcuts/using.html new file mode 100644 index 0000000000000000000000000000000000000000..65bcbf65a43175bdff687667a40f638bcba5ce4c --- /dev/null +++ b/testbed/django__django/tests/shortcuts/templates/shortcuts/using.html @@ -0,0 +1 @@ +DTL diff --git a/testbed/django__django/tests/shortcuts/tests.py b/testbed/django__django/tests/shortcuts/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..8e9c13d206207318685090659019b3ef7da88a61 --- /dev/null +++ 
b/testbed/django__django/tests/shortcuts/tests.py @@ -0,0 +1,37 @@ +from django.test import SimpleTestCase, override_settings +from django.test.utils import require_jinja2 + + +@override_settings(ROOT_URLCONF="shortcuts.urls") +class RenderTests(SimpleTestCase): + def test_render(self): + response = self.client.get("/render/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"FOO.BAR../render/\n") + self.assertEqual(response.headers["Content-Type"], "text/html; charset=utf-8") + self.assertFalse(hasattr(response.context.request, "current_app")) + + def test_render_with_multiple_templates(self): + response = self.client.get("/render/multiple_templates/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"FOO.BAR../render/multiple_templates/\n") + + def test_render_with_content_type(self): + response = self.client.get("/render/content_type/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"FOO.BAR../render/content_type/\n") + self.assertEqual(response.headers["Content-Type"], "application/x-rendertest") + + def test_render_with_status(self): + response = self.client.get("/render/status/") + self.assertEqual(response.status_code, 403) + self.assertEqual(response.content, b"FOO.BAR../render/status/\n") + + @require_jinja2 + def test_render_with_using(self): + response = self.client.get("/render/using/") + self.assertEqual(response.content, b"DTL\n") + response = self.client.get("/render/using/?using=django") + self.assertEqual(response.content, b"DTL\n") + response = self.client.get("/render/using/?using=jinja2") + self.assertEqual(response.content, b"Jinja2\n") diff --git a/testbed/django__django/tests/shortcuts/urls.py b/testbed/django__django/tests/shortcuts/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..a80b35a92aa06c7f5fd40bcd06f5467d8701a690 --- /dev/null +++ b/testbed/django__django/tests/shortcuts/urls.py @@ -0,0 +1,11 @@ +from 
django.urls import path + +from . import views + +urlpatterns = [ + path("render/", views.render_view), + path("render/multiple_templates/", views.render_view_with_multiple_templates), + path("render/content_type/", views.render_view_with_content_type), + path("render/status/", views.render_view_with_status), + path("render/using/", views.render_view_with_using), +] diff --git a/testbed/django__django/tests/shortcuts/views.py b/testbed/django__django/tests/shortcuts/views.py new file mode 100644 index 0000000000000000000000000000000000000000..ddfbdd65bd63443d75d729707f078b669c391808 --- /dev/null +++ b/testbed/django__django/tests/shortcuts/views.py @@ -0,0 +1,55 @@ +from django.shortcuts import render + + +def render_view(request): + return render( + request, + "shortcuts/render_test.html", + { + "foo": "FOO", + "bar": "BAR", + }, + ) + + +def render_view_with_multiple_templates(request): + return render( + request, + [ + "shortcuts/no_such_template.html", + "shortcuts/render_test.html", + ], + { + "foo": "FOO", + "bar": "BAR", + }, + ) + + +def render_view_with_content_type(request): + return render( + request, + "shortcuts/render_test.html", + { + "foo": "FOO", + "bar": "BAR", + }, + content_type="application/x-rendertest", + ) + + +def render_view_with_status(request): + return render( + request, + "shortcuts/render_test.html", + { + "foo": "FOO", + "bar": "BAR", + }, + status=403, + ) + + +def render_view_with_using(request): + using = request.GET.get("using") + return render(request, "shortcuts/using.html", using=using) diff --git a/testbed/django__django/tests/signals/__init__.py b/testbed/django__django/tests/signals/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/signals/models.py b/testbed/django__django/tests/signals/models.py new file mode 100644 index 0000000000000000000000000000000000000000..b758244749aec02807fc9bd0e9851beaf61b13f3 --- 
/dev/null +++ b/testbed/django__django/tests/signals/models.py @@ -0,0 +1,37 @@ +""" +Testing signals before/after saving and deleting. +""" +from django.db import models + + +class Person(models.Model): + first_name = models.CharField(max_length=20) + last_name = models.CharField(max_length=20) + + def __str__(self): + return "%s %s" % (self.first_name, self.last_name) + + +class Car(models.Model): + make = models.CharField(max_length=20) + model = models.CharField(max_length=20) + + +class Author(models.Model): + name = models.CharField(max_length=20) + + def __str__(self): + return self.name + + +class Book(models.Model): + name = models.CharField(max_length=20) + authors = models.ManyToManyField(Author) + + def __str__(self): + return self.name + + +class Page(models.Model): + book = models.ForeignKey(Book, on_delete=models.CASCADE) + text = models.TextField() diff --git a/testbed/django__django/tests/signals/tests.py b/testbed/django__django/tests/signals/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..5558778bbe902996c7d5f953b672448e98557bd9 --- /dev/null +++ b/testbed/django__django/tests/signals/tests.py @@ -0,0 +1,628 @@ +from unittest import mock + +from asgiref.sync import markcoroutinefunction + +from django import dispatch +from django.apps.registry import Apps +from django.db import models +from django.db.models import signals +from django.dispatch import receiver +from django.test import SimpleTestCase, TestCase +from django.test.utils import isolate_apps + +from .models import Author, Book, Car, Page, Person + + +class BaseSignalSetup: + def setUp(self): + # Save up the number of connected signals so that we can check at the + # end that all the signals we register get properly unregistered (#9989) + self.pre_signals = ( + len(signals.pre_save.receivers), + len(signals.post_save.receivers), + len(signals.pre_delete.receivers), + len(signals.post_delete.receivers), + ) + + def tearDown(self): + # All our signals got 
disconnected properly. + post_signals = ( + len(signals.pre_save.receivers), + len(signals.post_save.receivers), + len(signals.pre_delete.receivers), + len(signals.post_delete.receivers), + ) + self.assertEqual(self.pre_signals, post_signals) + + +class SignalTests(BaseSignalSetup, TestCase): + def test_model_pre_init_and_post_init(self): + data = [] + + def pre_init_callback(sender, args, **kwargs): + data.append(kwargs["kwargs"]) + + signals.pre_init.connect(pre_init_callback) + + def post_init_callback(sender, instance, **kwargs): + data.append(instance) + + signals.post_init.connect(post_init_callback) + + p1 = Person(first_name="John", last_name="Doe") + self.assertEqual(data, [{}, p1]) + + def test_save_signals(self): + data = [] + + def pre_save_handler(signal, sender, instance, **kwargs): + data.append((instance, sender, kwargs.get("raw", False))) + + def post_save_handler(signal, sender, instance, **kwargs): + data.append( + (instance, sender, kwargs.get("created"), kwargs.get("raw", False)) + ) + + signals.pre_save.connect(pre_save_handler, weak=False) + signals.post_save.connect(post_save_handler, weak=False) + try: + p1 = Person.objects.create(first_name="John", last_name="Smith") + + self.assertEqual( + data, + [ + (p1, Person, False), + (p1, Person, True, False), + ], + ) + data[:] = [] + + p1.first_name = "Tom" + p1.save() + self.assertEqual( + data, + [ + (p1, Person, False), + (p1, Person, False, False), + ], + ) + data[:] = [] + + # Calling an internal method purely so that we can trigger a "raw" save. 
+ p1.save_base(raw=True) + self.assertEqual( + data, + [ + (p1, Person, True), + (p1, Person, False, True), + ], + ) + data[:] = [] + + p2 = Person(first_name="James", last_name="Jones") + p2.id = 99999 + p2.save() + self.assertEqual( + data, + [ + (p2, Person, False), + (p2, Person, True, False), + ], + ) + data[:] = [] + p2.id = 99998 + p2.save() + self.assertEqual( + data, + [ + (p2, Person, False), + (p2, Person, True, False), + ], + ) + + # The sender should stay the same when using defer(). + data[:] = [] + p3 = Person.objects.defer("first_name").get(pk=p1.pk) + p3.last_name = "Reese" + p3.save() + self.assertEqual( + data, + [ + (p3, Person, False), + (p3, Person, False, False), + ], + ) + finally: + signals.pre_save.disconnect(pre_save_handler) + signals.post_save.disconnect(post_save_handler) + + def test_delete_signals(self): + data = [] + + def pre_delete_handler(signal, sender, instance, origin, **kwargs): + data.append((instance, sender, instance.id is None, origin)) + + # #8285: signals can be any callable + class PostDeleteHandler: + def __init__(self, data): + self.data = data + + def __call__(self, signal, sender, instance, origin, **kwargs): + self.data.append((instance, sender, instance.id is None, origin)) + + post_delete_handler = PostDeleteHandler(data) + + signals.pre_delete.connect(pre_delete_handler, weak=False) + signals.post_delete.connect(post_delete_handler, weak=False) + try: + p1 = Person.objects.create(first_name="John", last_name="Smith") + p1.delete() + self.assertEqual( + data, + [ + (p1, Person, False, p1), + (p1, Person, False, p1), + ], + ) + data[:] = [] + + p2 = Person(first_name="James", last_name="Jones") + p2.id = 99999 + p2.save() + p2.id = 99998 + p2.save() + p2.delete() + self.assertEqual( + data, + [ + (p2, Person, False, p2), + (p2, Person, False, p2), + ], + ) + data[:] = [] + + self.assertQuerySetEqual( + Person.objects.all(), + [ + "James Jones", + ], + str, + ) + finally: + 
signals.pre_delete.disconnect(pre_delete_handler) + signals.post_delete.disconnect(post_delete_handler) + + def test_delete_signals_origin_model(self): + data = [] + + def pre_delete_handler(signal, sender, instance, origin, **kwargs): + data.append((sender, origin)) + + def post_delete_handler(signal, sender, instance, origin, **kwargs): + data.append((sender, origin)) + + person = Person.objects.create(first_name="John", last_name="Smith") + book = Book.objects.create(name="Rayuela") + Page.objects.create(text="Page 1", book=book) + Page.objects.create(text="Page 2", book=book) + + signals.pre_delete.connect(pre_delete_handler, weak=False) + signals.post_delete.connect(post_delete_handler, weak=False) + try: + # Instance deletion. + person.delete() + self.assertEqual(data, [(Person, person), (Person, person)]) + data[:] = [] + # Cascade deletion. + book.delete() + self.assertEqual( + data, + [ + (Page, book), + (Page, book), + (Book, book), + (Page, book), + (Page, book), + (Book, book), + ], + ) + finally: + signals.pre_delete.disconnect(pre_delete_handler) + signals.post_delete.disconnect(post_delete_handler) + + def test_delete_signals_origin_queryset(self): + data = [] + + def pre_delete_handler(signal, sender, instance, origin, **kwargs): + data.append((sender, origin)) + + def post_delete_handler(signal, sender, instance, origin, **kwargs): + data.append((sender, origin)) + + Person.objects.create(first_name="John", last_name="Smith") + book = Book.objects.create(name="Rayuela") + Page.objects.create(text="Page 1", book=book) + Page.objects.create(text="Page 2", book=book) + + signals.pre_delete.connect(pre_delete_handler, weak=False) + signals.post_delete.connect(post_delete_handler, weak=False) + try: + # Queryset deletion. + qs = Person.objects.all() + qs.delete() + self.assertEqual(data, [(Person, qs), (Person, qs)]) + data[:] = [] + # Cascade deletion. 
+ qs = Book.objects.all() + qs.delete() + self.assertEqual( + data, + [ + (Page, qs), + (Page, qs), + (Book, qs), + (Page, qs), + (Page, qs), + (Book, qs), + ], + ) + finally: + signals.pre_delete.disconnect(pre_delete_handler) + signals.post_delete.disconnect(post_delete_handler) + + def test_decorators(self): + data = [] + + @receiver(signals.pre_save, weak=False) + def decorated_handler(signal, sender, instance, **kwargs): + data.append(instance) + + @receiver(signals.pre_save, sender=Car, weak=False) + def decorated_handler_with_sender_arg(signal, sender, instance, **kwargs): + data.append(instance) + + try: + c1 = Car.objects.create(make="Volkswagen", model="Passat") + self.assertEqual(data, [c1, c1]) + finally: + signals.pre_save.disconnect(decorated_handler) + signals.pre_save.disconnect(decorated_handler_with_sender_arg, sender=Car) + + def test_save_and_delete_signals_with_m2m(self): + data = [] + + def pre_save_handler(signal, sender, instance, **kwargs): + data.append("pre_save signal, %s" % instance) + if kwargs.get("raw"): + data.append("Is raw") + + def post_save_handler(signal, sender, instance, **kwargs): + data.append("post_save signal, %s" % instance) + if "created" in kwargs: + if kwargs["created"]: + data.append("Is created") + else: + data.append("Is updated") + if kwargs.get("raw"): + data.append("Is raw") + + def pre_delete_handler(signal, sender, instance, **kwargs): + data.append("pre_delete signal, %s" % instance) + data.append("instance.id is not None: %s" % (instance.id is not None)) + + def post_delete_handler(signal, sender, instance, **kwargs): + data.append("post_delete signal, %s" % instance) + data.append("instance.id is not None: %s" % (instance.id is not None)) + + signals.pre_save.connect(pre_save_handler, weak=False) + signals.post_save.connect(post_save_handler, weak=False) + signals.pre_delete.connect(pre_delete_handler, weak=False) + signals.post_delete.connect(post_delete_handler, weak=False) + try: + a1 = 
Author.objects.create(name="Neal Stephenson") + self.assertEqual( + data, + [ + "pre_save signal, Neal Stephenson", + "post_save signal, Neal Stephenson", + "Is created", + ], + ) + data[:] = [] + + b1 = Book.objects.create(name="Snow Crash") + self.assertEqual( + data, + [ + "pre_save signal, Snow Crash", + "post_save signal, Snow Crash", + "Is created", + ], + ) + data[:] = [] + + # Assigning and removing to/from m2m shouldn't generate an m2m signal. + b1.authors.set([a1]) + self.assertEqual(data, []) + b1.authors.set([]) + self.assertEqual(data, []) + finally: + signals.pre_save.disconnect(pre_save_handler) + signals.post_save.disconnect(post_save_handler) + signals.pre_delete.disconnect(pre_delete_handler) + signals.post_delete.disconnect(post_delete_handler) + + def test_disconnect_in_dispatch(self): + """ + Signals that disconnect when being called don't mess future + dispatching. + """ + + class Handler: + def __init__(self, param): + self.param = param + self._run = False + + def __call__(self, signal, sender, **kwargs): + self._run = True + signal.disconnect(receiver=self, sender=sender) + + a, b = Handler(1), Handler(2) + signals.post_save.connect(a, sender=Person, weak=False) + signals.post_save.connect(b, sender=Person, weak=False) + Person.objects.create(first_name="John", last_name="Smith") + + self.assertTrue(a._run) + self.assertTrue(b._run) + self.assertEqual(signals.post_save.receivers, []) + + @mock.patch("weakref.ref") + def test_lazy_model_signal(self, ref): + def callback(sender, args, **kwargs): + pass + + signals.pre_init.connect(callback) + signals.pre_init.disconnect(callback) + self.assertTrue(ref.called) + ref.reset_mock() + + signals.pre_init.connect(callback, weak=False) + signals.pre_init.disconnect(callback) + ref.assert_not_called() + + @isolate_apps("signals", kwarg_name="apps") + def test_disconnect_model(self, apps): + received = [] + + def receiver(**kwargs): + received.append(kwargs) + + class Created(models.Model): + pass + + 
signals.post_init.connect(receiver, sender=Created, apps=apps) + try: + self.assertIs( + signals.post_init.disconnect(receiver, sender=Created, apps=apps), + True, + ) + self.assertIs( + signals.post_init.disconnect(receiver, sender=Created, apps=apps), + False, + ) + Created() + self.assertEqual(received, []) + finally: + signals.post_init.disconnect(receiver, sender=Created) + + +class LazyModelRefTests(BaseSignalSetup, SimpleTestCase): + def setUp(self): + super().setUp() + self.received = [] + + def receiver(self, **kwargs): + self.received.append(kwargs) + + def test_invalid_sender_model_name(self): + msg = ( + "Invalid model reference 'invalid'. String model references must be of the " + "form 'app_label.ModelName'." + ) + with self.assertRaisesMessage(ValueError, msg): + signals.post_init.connect(self.receiver, sender="invalid") + + def test_already_loaded_model(self): + signals.post_init.connect(self.receiver, sender="signals.Book", weak=False) + try: + instance = Book() + self.assertEqual( + self.received, + [{"signal": signals.post_init, "sender": Book, "instance": instance}], + ) + finally: + signals.post_init.disconnect(self.receiver, sender=Book) + + @isolate_apps("signals", kwarg_name="apps") + def test_not_loaded_model(self, apps): + signals.post_init.connect( + self.receiver, sender="signals.Created", weak=False, apps=apps + ) + + try: + + class Created(models.Model): + pass + + instance = Created() + self.assertEqual( + self.received, + [ + { + "signal": signals.post_init, + "sender": Created, + "instance": instance, + } + ], + ) + finally: + signals.post_init.disconnect(self.receiver, sender=Created) + + @isolate_apps("signals", kwarg_name="apps") + def test_disconnect_registered_model(self, apps): + received = [] + + def receiver(**kwargs): + received.append(kwargs) + + class Created(models.Model): + pass + + signals.post_init.connect(receiver, sender="signals.Created", apps=apps) + try: + self.assertIsNone( + signals.post_init.disconnect( + 
receiver, sender="signals.Created", apps=apps + ) + ) + self.assertIsNone( + signals.post_init.disconnect( + receiver, sender="signals.Created", apps=apps + ) + ) + Created() + self.assertEqual(received, []) + finally: + signals.post_init.disconnect(receiver, sender="signals.Created") + + @isolate_apps("signals", kwarg_name="apps") + def test_disconnect_unregistered_model(self, apps): + received = [] + + def receiver(**kwargs): + received.append(kwargs) + + signals.post_init.connect(receiver, sender="signals.Created", apps=apps) + try: + self.assertIsNone( + signals.post_init.disconnect( + receiver, sender="signals.Created", apps=apps + ) + ) + self.assertIsNone( + signals.post_init.disconnect( + receiver, sender="signals.Created", apps=apps + ) + ) + + class Created(models.Model): + pass + + Created() + self.assertEqual(received, []) + finally: + signals.post_init.disconnect(receiver, sender="signals.Created") + + def test_register_model_class_senders_immediately(self): + """ + Model signals registered with model classes as senders don't use the + Apps.lazy_model_operation() mechanism. + """ + # Book isn't registered with apps2, so it will linger in + # apps2._pending_operations if ModelSignal does the wrong thing. 
+ apps2 = Apps() + signals.post_init.connect(self.receiver, sender=Book, apps=apps2) + self.assertEqual(list(apps2._pending_operations), []) + + +class SyncHandler: + param = 0 + + def __call__(self, **kwargs): + self.param += 1 + return self.param + + +class AsyncHandler: + param = 0 + + def __init__(self): + markcoroutinefunction(self) + + async def __call__(self, **kwargs): + self.param += 1 + return self.param + + +class AsyncReceiversTests(SimpleTestCase): + async def test_asend(self): + sync_handler = SyncHandler() + async_handler = AsyncHandler() + signal = dispatch.Signal() + signal.connect(sync_handler) + signal.connect(async_handler) + result = await signal.asend(self.__class__) + self.assertEqual(result, [(sync_handler, 1), (async_handler, 1)]) + + def test_send(self): + sync_handler = SyncHandler() + async_handler = AsyncHandler() + signal = dispatch.Signal() + signal.connect(sync_handler) + signal.connect(async_handler) + result = signal.send(self.__class__) + self.assertEqual(result, [(sync_handler, 1), (async_handler, 1)]) + + def test_send_robust(self): + class ReceiverException(Exception): + pass + + receiver_exception = ReceiverException() + + async def failing_async_handler(**kwargs): + raise receiver_exception + + sync_handler = SyncHandler() + async_handler = AsyncHandler() + signal = dispatch.Signal() + signal.connect(failing_async_handler) + signal.connect(async_handler) + signal.connect(sync_handler) + result = signal.send_robust(self.__class__) + # The ordering here is different than the order that signals were + # connected in. 
+ self.assertEqual( + result, + [ + (sync_handler, 1), + (failing_async_handler, receiver_exception), + (async_handler, 1), + ], + ) + + async def test_asend_robust(self): + class ReceiverException(Exception): + pass + + receiver_exception = ReceiverException() + + async def failing_async_handler(**kwargs): + raise receiver_exception + + sync_handler = SyncHandler() + async_handler = AsyncHandler() + signal = dispatch.Signal() + signal.connect(failing_async_handler) + signal.connect(async_handler) + signal.connect(sync_handler) + result = await signal.asend_robust(self.__class__) + # The ordering here is different than the order that signals were + # connected in. + self.assertEqual( + result, + [ + (sync_handler, 1), + (failing_async_handler, receiver_exception), + (async_handler, 1), + ], + ) diff --git a/testbed/django__django/tests/signed_cookies_tests/__init__.py b/testbed/django__django/tests/signed_cookies_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/signed_cookies_tests/tests.py b/testbed/django__django/tests/signed_cookies_tests/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..876887d883f180961db59c54e5fa47c18b5d19a4 --- /dev/null +++ b/testbed/django__django/tests/signed_cookies_tests/tests.py @@ -0,0 +1,79 @@ +from datetime import timedelta + +from django.core import signing +from django.http import HttpRequest, HttpResponse +from django.test import SimpleTestCase, override_settings +from django.test.utils import freeze_time + + +class SignedCookieTest(SimpleTestCase): + def test_can_set_and_read_signed_cookies(self): + response = HttpResponse() + response.set_signed_cookie("c", "hello") + self.assertIn("c", response.cookies) + self.assertTrue(response.cookies["c"].value.startswith("hello:")) + request = HttpRequest() + request.COOKIES["c"] = response.cookies["c"].value + value = 
request.get_signed_cookie("c") + self.assertEqual(value, "hello") + + def test_can_use_salt(self): + response = HttpResponse() + response.set_signed_cookie("a", "hello", salt="one") + request = HttpRequest() + request.COOKIES["a"] = response.cookies["a"].value + value = request.get_signed_cookie("a", salt="one") + self.assertEqual(value, "hello") + with self.assertRaises(signing.BadSignature): + request.get_signed_cookie("a", salt="two") + + def test_detects_tampering(self): + response = HttpResponse() + response.set_signed_cookie("c", "hello") + request = HttpRequest() + request.COOKIES["c"] = response.cookies["c"].value[:-2] + "$$" + with self.assertRaises(signing.BadSignature): + request.get_signed_cookie("c") + + def test_default_argument_suppresses_exceptions(self): + response = HttpResponse() + response.set_signed_cookie("c", "hello") + request = HttpRequest() + request.COOKIES["c"] = response.cookies["c"].value[:-2] + "$$" + self.assertIsNone(request.get_signed_cookie("c", default=None)) + + def test_max_age_argument(self): + value = "hello" + with freeze_time(123456789): + response = HttpResponse() + response.set_signed_cookie("c", value) + request = HttpRequest() + request.COOKIES["c"] = response.cookies["c"].value + self.assertEqual(request.get_signed_cookie("c"), value) + + with freeze_time(123456800): + self.assertEqual(request.get_signed_cookie("c", max_age=12), value) + self.assertEqual(request.get_signed_cookie("c", max_age=11), value) + self.assertEqual( + request.get_signed_cookie("c", max_age=timedelta(seconds=11)), value + ) + with self.assertRaises(signing.SignatureExpired): + request.get_signed_cookie("c", max_age=10) + with self.assertRaises(signing.SignatureExpired): + request.get_signed_cookie("c", max_age=timedelta(seconds=10)) + + def test_set_signed_cookie_max_age_argument(self): + response = HttpResponse() + response.set_signed_cookie("c", "value", max_age=100) + self.assertEqual(response.cookies["c"]["max-age"], 100) + 
response.set_signed_cookie("d", "value", max_age=timedelta(hours=2)) + self.assertEqual(response.cookies["d"]["max-age"], 7200) + + @override_settings(SECRET_KEY=b"\xe7") + def test_signed_cookies_with_binary_key(self): + response = HttpResponse() + response.set_signed_cookie("c", "hello") + + request = HttpRequest() + request.COOKIES["c"] = response.cookies["c"].value + self.assertEqual(request.get_signed_cookie("c"), "hello") diff --git a/testbed/django__django/tests/signing/__init__.py b/testbed/django__django/tests/signing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/signing/tests.py b/testbed/django__django/tests/signing/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..c145ce1e7d7e0499e372d0898cc6afa5ac72c8ec --- /dev/null +++ b/testbed/django__django/tests/signing/tests.py @@ -0,0 +1,262 @@ +import datetime + +from django.core import signing +from django.test import SimpleTestCase, override_settings +from django.test.utils import freeze_time, ignore_warnings +from django.utils.crypto import InvalidAlgorithm +from django.utils.deprecation import RemovedInDjango51Warning + + +class TestSigner(SimpleTestCase): + def test_signature(self): + "signature() method should generate a signature" + signer = signing.Signer(key="predictable-secret") + signer2 = signing.Signer(key="predictable-secret2") + for s in ( + b"hello", + b"3098247:529:087:", + "\u2019".encode(), + ): + self.assertEqual( + signer.signature(s), + signing.base64_hmac( + signer.salt + "signer", + s, + "predictable-secret", + algorithm=signer.algorithm, + ), + ) + self.assertNotEqual(signer.signature(s), signer2.signature(s)) + + def test_signature_with_salt(self): + signer = signing.Signer(key="predictable-secret", salt="extra-salt") + self.assertEqual( + signer.signature("hello"), + signing.base64_hmac( + "extra-salt" + "signer", + "hello", + 
"predictable-secret", + algorithm=signer.algorithm, + ), + ) + self.assertNotEqual( + signing.Signer(key="predictable-secret", salt="one").signature("hello"), + signing.Signer(key="predictable-secret", salt="two").signature("hello"), + ) + + def test_custom_algorithm(self): + signer = signing.Signer(key="predictable-secret", algorithm="sha512") + self.assertEqual( + signer.signature("hello"), + "Usf3uVQOZ9m6uPfVonKR-EBXjPe7bjMbp3_Fq8MfsptgkkM1ojidN0BxYaT5HAEN1" + "VzO9_jVu7R-VkqknHYNvw", + ) + + def test_invalid_algorithm(self): + signer = signing.Signer(key="predictable-secret", algorithm="whatever") + msg = "'whatever' is not an algorithm accepted by the hashlib module." + with self.assertRaisesMessage(InvalidAlgorithm, msg): + signer.sign("hello") + + def test_sign_unsign(self): + "sign/unsign should be reversible" + signer = signing.Signer(key="predictable-secret") + examples = [ + "q;wjmbk;wkmb", + "3098247529087", + "3098247:529:087:", + "jkw osanteuh ,rcuh nthu aou oauh ,ud du", + "\u2019", + ] + for example in examples: + signed = signer.sign(example) + self.assertIsInstance(signed, str) + self.assertNotEqual(example, signed) + self.assertEqual(example, signer.unsign(signed)) + + def test_sign_unsign_non_string(self): + signer = signing.Signer(key="predictable-secret") + values = [ + 123, + 1.23, + True, + datetime.date.today(), + ] + for value in values: + with self.subTest(value): + signed = signer.sign(value) + self.assertIsInstance(signed, str) + self.assertNotEqual(signed, value) + self.assertEqual(signer.unsign(signed), str(value)) + + def test_unsign_detects_tampering(self): + "unsign should raise an exception if the value has been tampered with" + signer = signing.Signer(key="predictable-secret") + value = "Another string" + signed_value = signer.sign(value) + transforms = ( + lambda s: s.upper(), + lambda s: s + "a", + lambda s: "a" + s[1:], + lambda s: s.replace(":", ""), + ) + self.assertEqual(value, signer.unsign(signed_value)) + for transform 
in transforms: + with self.assertRaises(signing.BadSignature): + signer.unsign(transform(signed_value)) + + def test_sign_unsign_object(self): + signer = signing.Signer(key="predictable-secret") + tests = [ + ["a", "list"], + "a string \u2019", + {"a": "dictionary"}, + ] + for obj in tests: + with self.subTest(obj=obj): + signed_obj = signer.sign_object(obj) + self.assertNotEqual(obj, signed_obj) + self.assertEqual(obj, signer.unsign_object(signed_obj)) + signed_obj = signer.sign_object(obj, compress=True) + self.assertNotEqual(obj, signed_obj) + self.assertEqual(obj, signer.unsign_object(signed_obj)) + + def test_dumps_loads(self): + "dumps and loads be reversible for any JSON serializable object" + objects = [ + ["a", "list"], + "a string \u2019", + {"a": "dictionary"}, + ] + for o in objects: + self.assertNotEqual(o, signing.dumps(o)) + self.assertEqual(o, signing.loads(signing.dumps(o))) + self.assertNotEqual(o, signing.dumps(o, compress=True)) + self.assertEqual(o, signing.loads(signing.dumps(o, compress=True))) + + def test_decode_detects_tampering(self): + "loads should raise exception for tampered objects" + transforms = ( + lambda s: s.upper(), + lambda s: s + "a", + lambda s: "a" + s[1:], + lambda s: s.replace(":", ""), + ) + value = { + "foo": "bar", + "baz": 1, + } + encoded = signing.dumps(value) + self.assertEqual(value, signing.loads(encoded)) + for transform in transforms: + with self.assertRaises(signing.BadSignature): + signing.loads(transform(encoded)) + + def test_works_with_non_ascii_keys(self): + binary_key = b"\xe7" # Set some binary (non-ASCII key) + + s = signing.Signer(key=binary_key) + self.assertEqual( + "foo:EE4qGC5MEKyQG5msxYA0sBohAxLC0BJf8uRhemh0BGU", + s.sign("foo"), + ) + + def test_valid_sep(self): + separators = ["/", "*sep*", ","] + for sep in separators: + signer = signing.Signer(key="predictable-secret", sep=sep) + self.assertEqual( + "foo%sjZQoX_FtSO70jX9HLRGg2A_2s4kdDBxz1QoO_OpEQb0" % sep, + signer.sign("foo"), + ) + + def 
test_invalid_sep(self): + """should warn on invalid separator""" + msg = ( + "Unsafe Signer separator: %r (cannot be empty or consist of only A-z0-9-_=)" + ) + separators = ["", "-", "abc"] + for sep in separators: + with self.assertRaisesMessage(ValueError, msg % sep): + signing.Signer(sep=sep) + + def test_verify_with_non_default_key(self): + old_signer = signing.Signer(key="secret") + new_signer = signing.Signer( + key="newsecret", fallback_keys=["othersecret", "secret"] + ) + signed = old_signer.sign("abc") + self.assertEqual(new_signer.unsign(signed), "abc") + + def test_sign_unsign_multiple_keys(self): + """The default key is a valid verification key.""" + signer = signing.Signer(key="secret", fallback_keys=["oldsecret"]) + signed = signer.sign("abc") + self.assertEqual(signer.unsign(signed), "abc") + + @override_settings( + SECRET_KEY="secret", + SECRET_KEY_FALLBACKS=["oldsecret"], + ) + def test_sign_unsign_ignore_secret_key_fallbacks(self): + old_signer = signing.Signer(key="oldsecret") + signed = old_signer.sign("abc") + signer = signing.Signer(fallback_keys=[]) + with self.assertRaises(signing.BadSignature): + signer.unsign(signed) + + @override_settings( + SECRET_KEY="secret", + SECRET_KEY_FALLBACKS=["oldsecret"], + ) + def test_default_keys_verification(self): + old_signer = signing.Signer(key="oldsecret") + signed = old_signer.sign("abc") + signer = signing.Signer() + self.assertEqual(signer.unsign(signed), "abc") + + +class TestTimestampSigner(SimpleTestCase): + def test_timestamp_signer(self): + value = "hello" + with freeze_time(123456789): + signer = signing.TimestampSigner(key="predictable-key") + ts = signer.sign(value) + self.assertNotEqual(ts, signing.Signer(key="predictable-key").sign(value)) + self.assertEqual(signer.unsign(ts), value) + + with freeze_time(123456800): + self.assertEqual(signer.unsign(ts, max_age=12), value) + # max_age parameter can also accept a datetime.timedelta object + self.assertEqual( + signer.unsign(ts, 
max_age=datetime.timedelta(seconds=11)), value + ) + with self.assertRaises(signing.SignatureExpired): + signer.unsign(ts, max_age=10) + + +class TestBase62(SimpleTestCase): + def test_base62(self): + tests = [-(10**10), 10**10, 1620378259, *range(-100, 100)] + for i in tests: + self.assertEqual(i, signing.b62_decode(signing.b62_encode(i))) + + +class SignerPositionalArgumentsDeprecationTests(SimpleTestCase): + def test_deprecation(self): + msg = "Passing positional arguments to Signer is deprecated." + with self.assertRaisesMessage(RemovedInDjango51Warning, msg): + signing.Signer("predictable-secret") + msg = "Passing positional arguments to TimestampSigner is deprecated." + with self.assertRaisesMessage(RemovedInDjango51Warning, msg): + signing.TimestampSigner("predictable-secret") + + @ignore_warnings(category=RemovedInDjango51Warning) + def test_positional_arguments(self): + signer = signing.Signer("secret", "/", "somesalt", "sha1", ["oldsecret"]) + signed = signer.sign("xyz") + self.assertEqual(signed, "xyz/zzdO_8rk-NGnm8jNasXRTF2P5kY") + self.assertEqual(signer.unsign(signed), "xyz") + old_signer = signing.Signer("oldsecret", "/", "somesalt", "sha1") + signed = old_signer.sign("xyz") + self.assertEqual(signer.unsign(signed), "xyz") diff --git a/testbed/django__django/tests/sitemaps_tests/__init__.py b/testbed/django__django/tests/sitemaps_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/sitemaps_tests/base.py b/testbed/django__django/tests/sitemaps_tests/base.py new file mode 100644 index 0000000000000000000000000000000000000000..ba9902c6f72454a2d5d9d6cddeb4c693a3c8d6ba --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/base.py @@ -0,0 +1,31 @@ +from django.apps import apps +from django.contrib.sites.models import Site +from django.core.cache import cache +from django.test import TestCase, modify_settings, override_settings + 
+from .models import I18nTestModel, TestModel + + +@modify_settings(INSTALLED_APPS={"append": "django.contrib.sitemaps"}) +@override_settings(ROOT_URLCONF="sitemaps_tests.urls.http") +class SitemapTestsBase(TestCase): + protocol = "http" + sites_installed = apps.is_installed("django.contrib.sites") + domain = "example.com" if sites_installed else "testserver" + + @classmethod + def setUpTestData(cls): + # Create an object for sitemap content. + TestModel.objects.create(name="Test Object") + cls.i18n_model = I18nTestModel.objects.create(name="Test Object") + + def setUp(self): + self.base_url = "%s://%s" % (self.protocol, self.domain) + cache.clear() + + @classmethod + def setUpClass(cls): + super().setUpClass() + # This cleanup is necessary because contrib.sites cache + # makes tests interfere with each other, see #11505 + Site.objects.clear_cache() diff --git a/testbed/django__django/tests/sitemaps_tests/models.py b/testbed/django__django/tests/sitemaps_tests/models.py new file mode 100644 index 0000000000000000000000000000000000000000..45edc7e7c8c15247d9410df298eaad37743b34df --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/models.py @@ -0,0 +1,17 @@ +from django.db import models +from django.urls import reverse + + +class TestModel(models.Model): + name = models.CharField(max_length=100) + lastmod = models.DateTimeField(null=True) + + def get_absolute_url(self): + return "/testmodel/%s/" % self.id + + +class I18nTestModel(models.Model): + name = models.CharField(max_length=100) + + def get_absolute_url(self): + return reverse("i18n_testmodel", args=[self.id]) diff --git a/testbed/django__django/tests/sitemaps_tests/templates/custom_sitemap.xml b/testbed/django__django/tests/sitemaps_tests/templates/custom_sitemap.xml new file mode 100644 index 0000000000000000000000000000000000000000..594aef1a3e1c488653595705cfc1e7bbe0f1bc50 --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/templates/custom_sitemap.xml @@ -0,0 +1,14 @@ + + + +{% 
spaceless %} +{% for url in urlset %} + + {{ url.location }} + {% if url.lastmod %}{{ url.lastmod|date:"Y-m-d" }}{% endif %} + {% if url.changefreq %}{{ url.changefreq }}{% endif %} + {% if url.priority %}{{ url.priority }}{% endif %} + +{% endfor %} +{% endspaceless %} + diff --git a/testbed/django__django/tests/sitemaps_tests/templates/custom_sitemap_lastmod_index.xml b/testbed/django__django/tests/sitemaps_tests/templates/custom_sitemap_lastmod_index.xml new file mode 100644 index 0000000000000000000000000000000000000000..dcf5e3258446aa42670378d50845408c8ebf991b --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/templates/custom_sitemap_lastmod_index.xml @@ -0,0 +1,14 @@ + + + +{% spaceless %} +{% for site in sitemaps %} + + {{ site.location }} + {% if site.last_mod %} + {{ site.last_mod|date:"c" }} + {% endif %} + +{% endfor %} +{% endspaceless %} + diff --git a/testbed/django__django/tests/sitemaps_tests/test_generic.py b/testbed/django__django/tests/sitemaps_tests/test_generic.py new file mode 100644 index 0000000000000000000000000000000000000000..dc998eec93d63de5a6ee9e861c453dab2f2c3b8a --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/test_generic.py @@ -0,0 +1,92 @@ +from datetime import datetime + +from django.contrib.sitemaps import GenericSitemap +from django.test import override_settings + +from .base import SitemapTestsBase +from .models import TestModel + + +@override_settings(ABSOLUTE_URL_OVERRIDES={}) +class GenericViewsSitemapTests(SitemapTestsBase): + def test_generic_sitemap_attributes(self): + datetime_value = datetime.now() + queryset = TestModel.objects.all() + generic_sitemap = GenericSitemap( + info_dict={ + "queryset": queryset, + "date_field": datetime_value, + }, + priority=0.6, + changefreq="monthly", + protocol="https", + ) + attr_values = ( + ("date_field", datetime_value), + ("priority", 0.6), + ("changefreq", "monthly"), + ("protocol", "https"), + ) + for attr_name, expected_value in attr_values: + with 
self.subTest(attr_name=attr_name): + self.assertEqual(getattr(generic_sitemap, attr_name), expected_value) + self.assertCountEqual(generic_sitemap.queryset, queryset) + + def test_generic_sitemap(self): + "A minimal generic sitemap can be rendered" + response = self.client.get("/generic/sitemap.xml") + expected = "" + for pk in TestModel.objects.values_list("id", flat=True): + expected += "%s/testmodel/%s/" % (self.base_url, pk) + expected_content = ( + '\n' + '\n' + "%s\n" + "" + ) % expected + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_generic_sitemap_lastmod(self): + test_model = TestModel.objects.first() + TestModel.objects.update(lastmod=datetime(2013, 3, 13, 10, 0, 0)) + response = self.client.get("/generic-lastmod/sitemap.xml") + expected_content = ( + '\n' + '\n' + "%s/testmodel/%s/2013-03-13\n" + "" + ) % ( + self.base_url, + test_model.pk, + ) + self.assertXMLEqual(response.content.decode(), expected_content) + self.assertEqual( + response.headers["Last-Modified"], "Wed, 13 Mar 2013 10:00:00 GMT" + ) + + def test_get_protocol_defined_in_constructor(self): + for protocol in ["http", "https"]: + with self.subTest(protocol=protocol): + sitemap = GenericSitemap({"queryset": None}, protocol=protocol) + self.assertEqual(sitemap.get_protocol(), protocol) + + def test_get_protocol_passed_as_argument(self): + sitemap = GenericSitemap({"queryset": None}) + for protocol in ["http", "https"]: + with self.subTest(protocol=protocol): + self.assertEqual(sitemap.get_protocol(protocol), protocol) + + def test_get_protocol_default(self): + sitemap = GenericSitemap({"queryset": None}) + self.assertEqual(sitemap.get_protocol(), "https") + + def test_generic_sitemap_index(self): + TestModel.objects.update(lastmod=datetime(2013, 3, 13, 10, 0, 0)) + response = self.client.get("/generic-lastmod/index.xml") + expected_content = """ + +http://example.com/simple/sitemap-generic.xml2013-03-13T10:00:00 +""" + 
self.assertXMLEqual(response.content.decode("utf-8"), expected_content) diff --git a/testbed/django__django/tests/sitemaps_tests/test_http.py b/testbed/django__django/tests/sitemaps_tests/test_http.py new file mode 100644 index 0000000000000000000000000000000000000000..74d183a7b0f52b1b1d059cec32bf704efa2f55f5 --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/test_http.py @@ -0,0 +1,580 @@ +import os +from datetime import date + +from django.contrib.sitemaps import Sitemap +from django.contrib.sites.models import Site +from django.core.exceptions import ImproperlyConfigured +from django.test import modify_settings, override_settings +from django.utils import translation +from django.utils.formats import localize + +from .base import SitemapTestsBase +from .models import I18nTestModel, TestModel + + +class HTTPSitemapTests(SitemapTestsBase): + use_sitemap_err_msg = ( + "To use sitemaps, either enable the sites framework or pass a " + "Site/RequestSite object in your view." + ) + + def test_simple_sitemap_index(self): + "A simple sitemap index can be rendered" + response = self.client.get("/simple/index.xml") + expected_content = """ + +%s/simple/sitemap-simple.xml%s + +""" % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_sitemap_not_callable(self): + """A sitemap may not be callable.""" + response = self.client.get("/simple-not-callable/index.xml") + expected_content = """ + +%s/simple/sitemap-simple.xml%s + +""" % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_paged_sitemap(self): + """A sitemap may have multiple pages.""" + response = self.client.get("/simple-paged/index.xml") + expected_content = """ + +{0}/simple/sitemap-simple.xml{1}{0}/simple/sitemap-simple.xml?p=2{1} + +""".format( + self.base_url, date.today() + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings( 
+ TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [os.path.join(os.path.dirname(__file__), "templates")], + } + ] + ) + def test_simple_sitemap_custom_lastmod_index(self): + "A simple sitemap index can be rendered with a custom template" + response = self.client.get("/simple/custom-lastmod-index.xml") + expected_content = """ + + +%s/simple/sitemap-simple.xml%s + +""" % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_simple_sitemap_section(self): + "A simple sitemap section can be rendered" + response = self.client.get("/simple/sitemap-simple.xml") + expected_content = ( + '\n' + '\n' + "%s/location/%s" + "never0.5\n" + "" + ) % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_no_section(self): + response = self.client.get("/simple/sitemap-simple2.xml") + self.assertEqual( + str(response.context["exception"]), + "No sitemap available for section: 'simple2'", + ) + self.assertEqual(response.status_code, 404) + + def test_empty_page(self): + response = self.client.get("/simple/sitemap-simple.xml?p=0") + self.assertEqual(str(response.context["exception"]), "Page 0 empty") + self.assertEqual(response.status_code, 404) + + def test_page_not_int(self): + response = self.client.get("/simple/sitemap-simple.xml?p=test") + self.assertEqual(str(response.context["exception"]), "No page 'test'") + self.assertEqual(response.status_code, 404) + + def test_simple_sitemap(self): + "A simple sitemap can be rendered" + response = self.client.get("/simple/sitemap.xml") + expected_content = ( + '\n' + '\n' + "%s/location/%s" + "never0.5\n" + "" + ) % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": 
[os.path.join(os.path.dirname(__file__), "templates")], + } + ] + ) + def test_simple_custom_sitemap(self): + "A simple sitemap can be rendered with a custom template" + response = self.client.get("/simple/custom-sitemap.xml") + expected_content = """ + + +%s/location/%snever0.5 + +""" % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_sitemap_last_modified(self): + "Last-Modified header is set correctly" + response = self.client.get("/lastmod/sitemap.xml") + self.assertEqual( + response.headers["Last-Modified"], "Wed, 13 Mar 2013 10:00:00 GMT" + ) + + def test_sitemap_last_modified_date(self): + """ + The Last-Modified header should be support dates (without time). + """ + response = self.client.get("/lastmod/date-sitemap.xml") + self.assertEqual( + response.headers["Last-Modified"], "Wed, 13 Mar 2013 00:00:00 GMT" + ) + + def test_sitemap_last_modified_tz(self): + """ + The Last-Modified header should be converted from timezone aware dates + to GMT. + """ + response = self.client.get("/lastmod/tz-sitemap.xml") + self.assertEqual( + response.headers["Last-Modified"], "Wed, 13 Mar 2013 15:00:00 GMT" + ) + + def test_sitemap_last_modified_missing(self): + "Last-Modified header is missing when sitemap has no lastmod" + response = self.client.get("/generic/sitemap.xml") + self.assertFalse(response.has_header("Last-Modified")) + + def test_sitemap_last_modified_mixed(self): + "Last-Modified header is omitted when lastmod not on all items" + response = self.client.get("/lastmod-mixed/sitemap.xml") + self.assertFalse(response.has_header("Last-Modified")) + + def test_sitemaps_lastmod_mixed_ascending_last_modified_missing(self): + """ + The Last-Modified header is omitted when lastmod isn't found in all + sitemaps. Test sitemaps are sorted by lastmod in ascending order. 
+ """ + response = self.client.get("/lastmod-sitemaps/mixed-ascending.xml") + self.assertFalse(response.has_header("Last-Modified")) + + def test_sitemaps_lastmod_mixed_descending_last_modified_missing(self): + """ + The Last-Modified header is omitted when lastmod isn't found in all + sitemaps. Test sitemaps are sorted by lastmod in descending order. + """ + response = self.client.get("/lastmod-sitemaps/mixed-descending.xml") + self.assertFalse(response.has_header("Last-Modified")) + + def test_sitemaps_lastmod_ascending(self): + """ + The Last-Modified header is set to the most recent sitemap lastmod. + Test sitemaps are sorted by lastmod in ascending order. + """ + response = self.client.get("/lastmod-sitemaps/ascending.xml") + self.assertEqual( + response.headers["Last-Modified"], "Sat, 20 Apr 2013 05:00:00 GMT" + ) + + def test_sitemaps_lastmod_descending(self): + """ + The Last-Modified header is set to the most recent sitemap lastmod. + Test sitemaps are sorted by lastmod in descending order. + """ + response = self.client.get("/lastmod-sitemaps/descending.xml") + self.assertEqual( + response.headers["Last-Modified"], "Sat, 20 Apr 2013 05:00:00 GMT" + ) + + def test_sitemap_get_latest_lastmod_none(self): + """ + sitemapindex.lastmod is omitted when Sitemap.lastmod is + callable and Sitemap.get_latest_lastmod is not implemented + """ + response = self.client.get("/lastmod/get-latest-lastmod-none-sitemap.xml") + self.assertNotContains(response, "") + + def test_sitemap_get_latest_lastmod(self): + """ + sitemapindex.lastmod is included when Sitemap.lastmod is + attribute and Sitemap.get_latest_lastmod is implemented + """ + response = self.client.get("/lastmod/get-latest-lastmod-sitemap.xml") + self.assertContains(response, "2013-03-13T10:00:00") + + def test_sitemap_latest_lastmod_timezone(self): + """ + lastmod datestamp shows timezones if Sitemap.get_latest_lastmod + returns an aware datetime. 
+ """ + response = self.client.get("/lastmod/latest-lastmod-timezone-sitemap.xml") + self.assertContains(response, "2013-03-13T10:00:00-05:00") + + def test_localized_priority(self): + """The priority value should not be localized.""" + with translation.override("fr"): + self.assertEqual("0,3", localize(0.3)) + # Priorities aren't rendered in localized format. + response = self.client.get("/simple/sitemap.xml") + self.assertContains(response, "0.5") + self.assertContains(response, "%s" % date.today()) + + @modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"}) + def test_requestsite_sitemap(self): + # Hitting the flatpages sitemap without the sites framework installed + # doesn't raise an exception. + response = self.client.get("/simple/sitemap.xml") + expected_content = ( + '\n' + '\n' + "http://testserver/location/%s" + "never0.5\n" + "" + ) % date.today() + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_sitemap_get_urls_no_site_1(self): + """ + Check we get ImproperlyConfigured if we don't pass a site object to + Sitemap.get_urls and no Site objects exist + """ + Site.objects.all().delete() + with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg): + Sitemap().get_urls() + + @modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"}) + def test_sitemap_get_urls_no_site_2(self): + """ + Check we get ImproperlyConfigured when we don't pass a site object to + Sitemap.get_urls if Site objects exists, but the sites framework is not + actually installed. + """ + with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg): + Sitemap().get_urls() + + def test_sitemap_item(self): + """ + Check to make sure that the raw item is included with each + Sitemap.get_url() url result. 
+ """ + test_sitemap = Sitemap() + test_sitemap.items = TestModel.objects.order_by("pk").all + + def is_testmodel(url): + return isinstance(url["item"], TestModel) + + item_in_url_info = all(map(is_testmodel, test_sitemap.get_urls())) + self.assertTrue(item_in_url_info) + + def test_cached_sitemap_index(self): + """ + A cached sitemap index can be rendered (#2713). + """ + response = self.client.get("/cached/index.xml") + expected_content = """ + +%s/cached/sitemap-simple.xml%s + +""" % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_x_robots_sitemap(self): + response = self.client.get("/simple/index.xml") + self.assertEqual(response.headers["X-Robots-Tag"], "noindex, noodp, noarchive") + + response = self.client.get("/simple/sitemap.xml") + self.assertEqual(response.headers["X-Robots-Tag"], "noindex, noodp, noarchive") + + def test_empty_sitemap(self): + response = self.client.get("/empty/sitemap.xml") + self.assertEqual(response.status_code, 200) + + @override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese"))) + def test_simple_i18n_sitemap_index(self): + """ + A simple i18n sitemap index can be rendered, without logging variable + lookup errors. + """ + with self.assertNoLogs("django.template", "DEBUG"): + response = self.client.get("/simple/i18n.xml") + expected_content = ( + '\n' + '\n' + "{0}/en/i18n/testmodel/{1}/never" + "0.5{0}/pt/i18n/testmodel/{1}/" + "never0.5\n" + "" + ).format(self.base_url, self.i18n_model.pk) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese"))) + def test_alternate_i18n_sitemap_index(self): + """ + A i18n sitemap with alternate/hreflang links can be rendered. 
+ """ + response = self.client.get("/alternates/i18n.xml") + url, pk = self.base_url, self.i18n_model.pk + expected_urls = f""" +{url}/en/i18n/testmodel/{pk}/never0.5 + + + +{url}/pt/i18n/testmodel/{pk}/never0.5 + + + +""".replace( + "\n", "" + ) + expected_content = ( + f'\n' + f'\n' + f"{expected_urls}\n" + f"" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings( + LANGUAGES=(("en", "English"), ("pt", "Portuguese"), ("es", "Spanish")) + ) + def test_alternate_i18n_sitemap_limited(self): + """ + A i18n sitemap index with limited languages can be rendered. + """ + response = self.client.get("/limited/i18n.xml") + url, pk = self.base_url, self.i18n_model.pk + expected_urls = f""" +{url}/en/i18n/testmodel/{pk}/never0.5 + + + +{url}/es/i18n/testmodel/{pk}/never0.5 + + + +""".replace( + "\n", "" + ) + expected_content = ( + f'\n' + f'\n' + f"{expected_urls}\n" + f"" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese"))) + def test_alternate_i18n_sitemap_xdefault(self): + """ + A i18n sitemap index with x-default can be rendered. + """ + response = self.client.get("/x-default/i18n.xml") + url, pk = self.base_url, self.i18n_model.pk + expected_urls = f""" +{url}/en/i18n/testmodel/{pk}/never0.5 + + + + +{url}/pt/i18n/testmodel/{pk}/never0.5 + + + + +""".replace( + "\n", "" + ) + expected_content = ( + f'\n' + f'\n' + f"{expected_urls}\n" + f"" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese"))) + def test_language_for_item_i18n_sitemap(self): + """ + A i18n sitemap index in which item can be chosen to be displayed for a + lang or not. 
+ """ + only_pt = I18nTestModel.objects.create(name="Only for PT") + response = self.client.get("/item-by-lang/i18n.xml") + url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk + expected_urls = ( + f"{url}/en/i18n/testmodel/{pk}/" + f"never0.5" + f"{url}/pt/i18n/testmodel/{pk}/" + f"never0.5" + f"{url}/pt/i18n/testmodel/{only_pt_pk}/" + f"never0.5" + ) + expected_content = ( + f'\n' + f'\n' + f"{expected_urls}\n" + f"" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese"))) + def test_alternate_language_for_item_i18n_sitemap(self): + """ + A i18n sitemap index in which item can be chosen to be displayed for a + lang or not. + """ + only_pt = I18nTestModel.objects.create(name="Only for PT") + response = self.client.get("/item-by-lang-alternates/i18n.xml") + url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk + expected_urls = ( + f"{url}/en/i18n/testmodel/{pk}/" + f"never0.5" + f'' + f'' + f'' + f"{url}/pt/i18n/testmodel/{pk}/" + f"never0.5" + f'' + f'' + f'' + f"{url}/pt/i18n/testmodel/{only_pt_pk}/" + f"never0.5" + f'' + ) + expected_content = ( + f'\n' + f'\n' + f"{expected_urls}\n" + f"" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_sitemap_without_entries(self): + response = self.client.get("/sitemap-without-entries/sitemap.xml") + expected_content = ( + '\n' + '\n\n' + "" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_callable_sitemod_partial(self): + """ + Not all items have `lastmod`. Therefore the `Last-Modified` header + is not set by the detail or index sitemap view. 
+ """ + index_response = self.client.get("/callable-lastmod-partial/index.xml") + sitemap_response = self.client.get("/callable-lastmod-partial/sitemap.xml") + self.assertNotIn("Last-Modified", index_response) + self.assertNotIn("Last-Modified", sitemap_response) + expected_content_index = """ + + http://example.com/simple/sitemap-callable-lastmod.xml + + """ + expected_content_sitemap = ( + '\n' + '\n' + "http://example.com/location/" + "2013-03-13" + "http://example.com/location/\n" + "" + ) + self.assertXMLEqual(index_response.content.decode(), expected_content_index) + self.assertXMLEqual(sitemap_response.content.decode(), expected_content_sitemap) + + def test_callable_sitemod_full(self): + """ + All items in the sitemap have `lastmod`. The `Last-Modified` header + is set for the detail and index sitemap view. + """ + index_response = self.client.get("/callable-lastmod-full/index.xml") + sitemap_response = self.client.get("/callable-lastmod-full/sitemap.xml") + self.assertEqual( + index_response.headers["Last-Modified"], "Thu, 13 Mar 2014 10:00:00 GMT" + ) + self.assertEqual( + sitemap_response.headers["Last-Modified"], "Thu, 13 Mar 2014 10:00:00 GMT" + ) + expected_content_index = """ + + http://example.com/simple/sitemap-callable-lastmod.xml2014-03-13T10:00:00 + + """ + expected_content_sitemap = ( + '\n' + '\n' + "http://example.com/location/" + "2013-03-13" + "http://example.com/location/" + "2014-03-13\n" + "" + ) + self.assertXMLEqual(index_response.content.decode(), expected_content_index) + self.assertXMLEqual(sitemap_response.content.decode(), expected_content_sitemap) + + def test_callable_sitemod_no_items(self): + index_response = self.client.get("/callable-lastmod-no-items/index.xml") + self.assertNotIn("Last-Modified", index_response) + expected_content_index = """ + + http://example.com/simple/sitemap-callable-lastmod.xml + + """ + self.assertXMLEqual(index_response.content.decode(), expected_content_index) diff --git 
a/testbed/django__django/tests/sitemaps_tests/test_https.py b/testbed/django__django/tests/sitemaps_tests/test_https.py new file mode 100644 index 0000000000000000000000000000000000000000..2eae71e4cc497cb7d3c490a86111604ca273d112 --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/test_https.py @@ -0,0 +1,73 @@ +from datetime import date + +from django.test import override_settings + +from .base import SitemapTestsBase + + +@override_settings(ROOT_URLCONF="sitemaps_tests.urls.https") +class HTTPSSitemapTests(SitemapTestsBase): + protocol = "https" + + def test_secure_sitemap_index(self): + "A secure sitemap index can be rendered" + response = self.client.get("/secure/index.xml") + expected_content = """ + +%s/secure/sitemap-simple.xml%s + +""" % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_secure_sitemap_section(self): + "A secure sitemap section can be rendered" + response = self.client.get("/secure/sitemap-simple.xml") + expected_content = ( + '\n' + '\n' + "%s/location/%s" + "never0.5\n" + "" + ) % ( + self.base_url, + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + +@override_settings(SECURE_PROXY_SSL_HEADER=False) +class HTTPSDetectionSitemapTests(SitemapTestsBase): + extra = {"wsgi.url_scheme": "https"} + + def test_sitemap_index_with_https_request(self): + "A sitemap index requested in HTTPS is rendered with HTTPS links" + response = self.client.get("/simple/index.xml", **self.extra) + expected_content = """ + +%s/simple/sitemap-simple.xml%s + +""" % ( + self.base_url.replace("http://", "https://"), + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + def test_sitemap_section_with_https_request(self): + "A sitemap section requested in HTTPS is rendered with HTTPS links" + response = self.client.get("/simple/sitemap-simple.xml", **self.extra) + expected_content = ( + '\n' + '\n' + "%s/location/%s" 
+ "never0.5\n" + "" + ) % ( + self.base_url.replace("http://", "https://"), + date.today(), + ) + self.assertXMLEqual(response.content.decode(), expected_content) diff --git a/testbed/django__django/tests/sitemaps_tests/urls/__init__.py b/testbed/django__django/tests/sitemaps_tests/urls/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/sitemaps_tests/urls/http.py b/testbed/django__django/tests/sitemaps_tests/urls/http.py new file mode 100644 index 0000000000000000000000000000000000000000..db549b4a3872d92c071740585f3c9ba80c0cb9c6 --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/urls/http.py @@ -0,0 +1,480 @@ +from datetime import date, datetime + +from django.conf.urls.i18n import i18n_patterns +from django.contrib.sitemaps import GenericSitemap, Sitemap, views +from django.http import HttpResponse +from django.urls import path +from django.utils import timezone +from django.views.decorators.cache import cache_page + +from ..models import I18nTestModel, TestModel + + +class SimpleSitemap(Sitemap): + changefreq = "never" + priority = 0.5 + location = "/location/" + lastmod = date.today() + + def items(self): + return [object()] + + +class SimplePagedSitemap(Sitemap): + lastmod = date.today() + + def items(self): + return [object() for x in range(Sitemap.limit + 1)] + + +class SimpleI18nSitemap(Sitemap): + changefreq = "never" + priority = 0.5 + i18n = True + + def items(self): + return I18nTestModel.objects.order_by("pk").all() + + +class AlternatesI18nSitemap(SimpleI18nSitemap): + alternates = True + + +class LimitedI18nSitemap(AlternatesI18nSitemap): + languages = ["en", "es"] + + +class XDefaultI18nSitemap(AlternatesI18nSitemap): + x_default = True + + +class ItemByLangSitemap(SimpleI18nSitemap): + def get_languages_for_item(self, item): + if item.name == "Only for PT": + return ["pt"] + return super().get_languages_for_item(item) + + 
+class ItemByLangAlternatesSitemap(AlternatesI18nSitemap): + x_default = True + + def get_languages_for_item(self, item): + if item.name == "Only for PT": + return ["pt"] + return super().get_languages_for_item(item) + + +class EmptySitemap(Sitemap): + changefreq = "never" + priority = 0.5 + location = "/location/" + + +class FixedLastmodSitemap(SimpleSitemap): + lastmod = datetime(2013, 3, 13, 10, 0, 0) + + +class FixedLastmodMixedSitemap(Sitemap): + changefreq = "never" + priority = 0.5 + location = "/location/" + loop = 0 + + def items(self): + o1 = TestModel() + o1.lastmod = datetime(2013, 3, 13, 10, 0, 0) + o2 = TestModel() + return [o1, o2] + + +class FixedNewerLastmodSitemap(SimpleSitemap): + lastmod = datetime(2013, 4, 20, 5, 0, 0) + + +class DateSiteMap(SimpleSitemap): + lastmod = date(2013, 3, 13) + + +class TimezoneSiteMap(SimpleSitemap): + lastmod = datetime(2013, 3, 13, 10, 0, 0, tzinfo=timezone.get_fixed_timezone(-300)) + + +class CallableLastmodPartialSitemap(Sitemap): + """Not all items have `lastmod`.""" + + location = "/location/" + + def items(self): + o1 = TestModel() + o1.lastmod = datetime(2013, 3, 13, 10, 0, 0) + o2 = TestModel() + return [o1, o2] + + def lastmod(self, obj): + return obj.lastmod + + +class CallableLastmodFullSitemap(Sitemap): + """All items have `lastmod`.""" + + location = "/location/" + + def items(self): + o1 = TestModel() + o1.lastmod = datetime(2013, 3, 13, 10, 0, 0) + o2 = TestModel() + o2.lastmod = datetime(2014, 3, 13, 10, 0, 0) + return [o1, o2] + + def lastmod(self, obj): + return obj.lastmod + + +class CallableLastmodNoItemsSitemap(Sitemap): + location = "/location/" + + def items(self): + return [] + + def lastmod(self, obj): + return obj.lastmod + + +class GetLatestLastmodNoneSiteMap(Sitemap): + changefreq = "never" + priority = 0.5 + location = "/location/" + + def items(self): + return [object()] + + def lastmod(self, obj): + return datetime(2013, 3, 13, 10, 0, 0) + + def get_latest_lastmod(self): + return None 
+ + +class GetLatestLastmodSiteMap(SimpleSitemap): + def get_latest_lastmod(self): + return datetime(2013, 3, 13, 10, 0, 0) + + +def testmodelview(request, id): + return HttpResponse() + + +simple_sitemaps = { + "simple": SimpleSitemap, +} + +simple_i18n_sitemaps = { + "i18n": SimpleI18nSitemap, +} + +alternates_i18n_sitemaps = { + "i18n-alternates": AlternatesI18nSitemap, +} + +limited_i18n_sitemaps = { + "i18n-limited": LimitedI18nSitemap, +} + +xdefault_i18n_sitemaps = { + "i18n-xdefault": XDefaultI18nSitemap, +} + +item_by_lang_i18n_sitemaps = { + "i18n-item-by-lang": ItemByLangSitemap, +} + +item_by_lang_alternates_i18n_sitemaps = { + "i18n-item-by-lang-alternates": ItemByLangAlternatesSitemap, +} + +simple_sitemaps_not_callable = { + "simple": SimpleSitemap(), +} + +simple_sitemaps_paged = { + "simple": SimplePagedSitemap, +} + +empty_sitemaps = { + "empty": EmptySitemap, +} + +fixed_lastmod_sitemaps = { + "fixed-lastmod": FixedLastmodSitemap, +} + +fixed_lastmod_mixed_sitemaps = { + "fixed-lastmod-mixed": FixedLastmodMixedSitemap, +} + +sitemaps_lastmod_mixed_ascending = { + "no-lastmod": EmptySitemap, + "lastmod": FixedLastmodSitemap, +} + +sitemaps_lastmod_mixed_descending = { + "lastmod": FixedLastmodSitemap, + "no-lastmod": EmptySitemap, +} + +sitemaps_lastmod_ascending = { + "date": DateSiteMap, + "datetime": FixedLastmodSitemap, + "datetime-newer": FixedNewerLastmodSitemap, +} + +sitemaps_lastmod_descending = { + "datetime-newer": FixedNewerLastmodSitemap, + "datetime": FixedLastmodSitemap, + "date": DateSiteMap, +} + +generic_sitemaps = { + "generic": GenericSitemap({"queryset": TestModel.objects.order_by("pk").all()}), +} + +get_latest_lastmod_none_sitemaps = { + "get-latest-lastmod-none": GetLatestLastmodNoneSiteMap, +} + +get_latest_lastmod_sitemaps = { + "get-latest-lastmod": GetLatestLastmodSiteMap, +} + +latest_lastmod_timezone_sitemaps = { + "latest-lastmod-timezone": TimezoneSiteMap, +} + +generic_sitemaps_lastmod = { + "generic": 
GenericSitemap( + { + "queryset": TestModel.objects.order_by("pk").all(), + "date_field": "lastmod", + } + ), +} + +callable_lastmod_partial_sitemap = { + "callable-lastmod": CallableLastmodPartialSitemap, +} + +callable_lastmod_full_sitemap = { + "callable-lastmod": CallableLastmodFullSitemap, +} + +callable_lastmod_no_items_sitemap = { + "callable-lastmod": CallableLastmodNoItemsSitemap, +} + +urlpatterns = [ + path("simple/index.xml", views.index, {"sitemaps": simple_sitemaps}), + path("simple-paged/index.xml", views.index, {"sitemaps": simple_sitemaps_paged}), + path( + "simple-not-callable/index.xml", + views.index, + {"sitemaps": simple_sitemaps_not_callable}, + ), + path( + "simple/custom-lastmod-index.xml", + views.index, + { + "sitemaps": simple_sitemaps, + "template_name": "custom_sitemap_lastmod_index.xml", + }, + ), + path( + "simple/sitemap-
    .xml", + views.sitemap, + {"sitemaps": simple_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "simple/sitemap.xml", + views.sitemap, + {"sitemaps": simple_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "simple/i18n.xml", + views.sitemap, + {"sitemaps": simple_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "alternates/i18n.xml", + views.sitemap, + {"sitemaps": alternates_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "limited/i18n.xml", + views.sitemap, + {"sitemaps": limited_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "x-default/i18n.xml", + views.sitemap, + {"sitemaps": xdefault_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "simple/custom-sitemap.xml", + views.sitemap, + {"sitemaps": simple_sitemaps, "template_name": "custom_sitemap.xml"}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "empty/sitemap.xml", + views.sitemap, + {"sitemaps": empty_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod/sitemap.xml", + views.sitemap, + {"sitemaps": fixed_lastmod_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod-mixed/sitemap.xml", + views.sitemap, + {"sitemaps": fixed_lastmod_mixed_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod/date-sitemap.xml", + views.sitemap, + {"sitemaps": {"date-sitemap": DateSiteMap}}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod/tz-sitemap.xml", + views.sitemap, + {"sitemaps": {"tz-sitemap": TimezoneSiteMap}}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod-sitemaps/mixed-ascending.xml", + views.sitemap, + {"sitemaps": sitemaps_lastmod_mixed_ascending}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod-sitemaps/mixed-descending.xml", + views.sitemap, + {"sitemaps": 
sitemaps_lastmod_mixed_descending}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod-sitemaps/ascending.xml", + views.sitemap, + {"sitemaps": sitemaps_lastmod_ascending}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "item-by-lang/i18n.xml", + views.sitemap, + {"sitemaps": item_by_lang_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "item-by-lang-alternates/i18n.xml", + views.sitemap, + {"sitemaps": item_by_lang_alternates_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod-sitemaps/descending.xml", + views.sitemap, + {"sitemaps": sitemaps_lastmod_descending}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "lastmod/get-latest-lastmod-none-sitemap.xml", + views.index, + {"sitemaps": get_latest_lastmod_none_sitemaps}, + name="django.contrib.sitemaps.views.index", + ), + path( + "lastmod/get-latest-lastmod-sitemap.xml", + views.index, + {"sitemaps": get_latest_lastmod_sitemaps}, + name="django.contrib.sitemaps.views.index", + ), + path( + "lastmod/latest-lastmod-timezone-sitemap.xml", + views.index, + {"sitemaps": latest_lastmod_timezone_sitemaps}, + name="django.contrib.sitemaps.views.index", + ), + path( + "generic/sitemap.xml", + views.sitemap, + {"sitemaps": generic_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "generic-lastmod/sitemap.xml", + views.sitemap, + {"sitemaps": generic_sitemaps_lastmod}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "cached/index.xml", + cache_page(1)(views.index), + {"sitemaps": simple_sitemaps, "sitemap_url_name": "cached_sitemap"}, + ), + path( + "cached/sitemap-
    .xml", + cache_page(1)(views.sitemap), + {"sitemaps": simple_sitemaps}, + name="cached_sitemap", + ), + path( + "sitemap-without-entries/sitemap.xml", + views.sitemap, + {"sitemaps": {}}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "callable-lastmod-partial/index.xml", + views.index, + {"sitemaps": callable_lastmod_partial_sitemap}, + ), + path( + "callable-lastmod-partial/sitemap.xml", + views.sitemap, + {"sitemaps": callable_lastmod_partial_sitemap}, + ), + path( + "callable-lastmod-full/index.xml", + views.index, + {"sitemaps": callable_lastmod_full_sitemap}, + ), + path( + "callable-lastmod-full/sitemap.xml", + views.sitemap, + {"sitemaps": callable_lastmod_full_sitemap}, + ), + path( + "callable-lastmod-no-items/index.xml", + views.index, + {"sitemaps": callable_lastmod_no_items_sitemap}, + ), + path( + "generic-lastmod/index.xml", + views.index, + {"sitemaps": generic_sitemaps_lastmod}, + name="django.contrib.sitemaps.views.index", + ), +] + +urlpatterns += i18n_patterns( + path("i18n/testmodel//", testmodelview, name="i18n_testmodel"), +) diff --git a/testbed/django__django/tests/sitemaps_tests/urls/https.py b/testbed/django__django/tests/sitemaps_tests/urls/https.py new file mode 100644 index 0000000000000000000000000000000000000000..da7ecaf5db270550508a945959d1acdd5e9c3e58 --- /dev/null +++ b/testbed/django__django/tests/sitemaps_tests/urls/https.py @@ -0,0 +1,23 @@ +from django.contrib.sitemaps import views +from django.urls import path + +from .http import SimpleSitemap + + +class HTTPSSitemap(SimpleSitemap): + protocol = "https" + + +secure_sitemaps = { + "simple": HTTPSSitemap, +} + +urlpatterns = [ + path("secure/index.xml", views.index, {"sitemaps": secure_sitemaps}), + path( + "secure/sitemap-
    .xml", + views.sitemap, + {"sitemaps": secure_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), +] diff --git a/testbed/django__django/tests/sites_framework/__init__.py b/testbed/django__django/tests/sites_framework/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/sites_framework/migrations/0001_initial.py b/testbed/django__django/tests/sites_framework/migrations/0001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..c5721ee08e38a740a305cc506041725f4ac8cadb --- /dev/null +++ b/testbed/django__django/tests/sites_framework/migrations/0001_initial.py @@ -0,0 +1,73 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("sites", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="CustomArticle", + fields=[ + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("title", models.CharField(max_length=50)), + ( + "places_this_article_should_appear", + models.ForeignKey("sites.Site", models.CASCADE), + ), + ], + options={ + "abstract": False, + }, + bases=(models.Model,), + ), + migrations.CreateModel( + name="ExclusiveArticle", + fields=[ + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("title", models.CharField(max_length=50)), + ("site", models.ForeignKey("sites.Site", models.CASCADE)), + ], + options={ + "abstract": False, + }, + bases=(models.Model,), + ), + migrations.CreateModel( + name="SyndicatedArticle", + fields=[ + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("title", models.CharField(max_length=50)), + ("sites", models.ManyToManyField("sites.Site")), + ], + options={ + "abstract": False, + }, + 
bases=(models.Model,), + ), + ] diff --git a/testbed/django__django/tests/sites_framework/migrations/__init__.py b/testbed/django__django/tests/sites_framework/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/sites_framework/models.py b/testbed/django__django/tests/sites_framework/models.py new file mode 100644 index 0000000000000000000000000000000000000000..fcedbe295f80be5a399cf6da1f08bcf3de2d2759 --- /dev/null +++ b/testbed/django__django/tests/sites_framework/models.py @@ -0,0 +1,28 @@ +from django.contrib.sites.managers import CurrentSiteManager +from django.contrib.sites.models import Site +from django.db import models + + +class AbstractArticle(models.Model): + title = models.CharField(max_length=50) + + objects = models.Manager() + on_site = CurrentSiteManager() + + class Meta: + abstract = True + + +class SyndicatedArticle(AbstractArticle): + sites = models.ManyToManyField(Site) + + +class ExclusiveArticle(AbstractArticle): + site = models.ForeignKey(Site, models.CASCADE) + + +class CustomArticle(AbstractArticle): + places_this_article_should_appear = models.ForeignKey(Site, models.CASCADE) + + objects = models.Manager() + on_site = CurrentSiteManager("places_this_article_should_appear") diff --git a/testbed/django__django/tests/sites_framework/tests.py b/testbed/django__django/tests/sites_framework/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..4a297a924349d1d70af6dd08a072b42b4914de14 --- /dev/null +++ b/testbed/django__django/tests/sites_framework/tests.py @@ -0,0 +1,75 @@ +from django.conf import settings +from django.contrib.sites.managers import CurrentSiteManager +from django.contrib.sites.models import Site +from django.core import checks +from django.db import models +from django.test import SimpleTestCase, TestCase +from django.test.utils import isolate_apps + +from .models import 
CustomArticle, ExclusiveArticle, SyndicatedArticle + + +class SitesFrameworkTestCase(TestCase): + @classmethod + def setUpTestData(cls): + Site.objects.get_or_create( + id=settings.SITE_ID, domain="example.com", name="example.com" + ) + Site.objects.create( + id=settings.SITE_ID + 1, domain="example2.com", name="example2.com" + ) + + def test_site_fk(self): + article = ExclusiveArticle.objects.create( + title="Breaking News!", site_id=settings.SITE_ID + ) + self.assertEqual(ExclusiveArticle.on_site.get(), article) + + def test_sites_m2m(self): + article = SyndicatedArticle.objects.create(title="Fresh News!") + article.sites.add(Site.objects.get(id=settings.SITE_ID)) + article.sites.add(Site.objects.get(id=settings.SITE_ID + 1)) + article2 = SyndicatedArticle.objects.create(title="More News!") + article2.sites.add(Site.objects.get(id=settings.SITE_ID + 1)) + self.assertEqual(SyndicatedArticle.on_site.get(), article) + + def test_custom_named_field(self): + article = CustomArticle.objects.create( + title="Tantalizing News!", + places_this_article_should_appear_id=settings.SITE_ID, + ) + self.assertEqual(CustomArticle.on_site.get(), article) + + +@isolate_apps("sites_framework") +class CurrentSiteManagerChecksTests(SimpleTestCase): + def test_invalid_name(self): + class InvalidArticle(models.Model): + on_site = CurrentSiteManager("places_this_article_should_appear") + + errors = InvalidArticle.check() + expected = [ + checks.Error( + "CurrentSiteManager could not find a field named " + "'places_this_article_should_appear'.", + obj=InvalidArticle.on_site, + id="sites.E001", + ) + ] + self.assertEqual(errors, expected) + + def test_invalid_field_type(self): + class ConfusedArticle(models.Model): + site = models.IntegerField() + on_site = CurrentSiteManager() + + errors = ConfusedArticle.check() + expected = [ + checks.Error( + "CurrentSiteManager cannot use 'ConfusedArticle.site' as it is " + "not a foreign key or a many-to-many field.", + obj=ConfusedArticle.on_site, + 
id="sites.E002", + ) + ] + self.assertEqual(errors, expected) diff --git a/testbed/django__django/tests/sites_tests/__init__.py b/testbed/django__django/tests/sites_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/sites_tests/tests.py b/testbed/django__django/tests/sites_tests/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..f0ac9dc2ece6ca961b50e58c556884743efad034 --- /dev/null +++ b/testbed/django__django/tests/sites_tests/tests.py @@ -0,0 +1,353 @@ +from django.apps import apps +from django.apps.registry import Apps +from django.conf import settings +from django.contrib.sites import models +from django.contrib.sites.checks import check_site_id +from django.contrib.sites.management import create_default_site +from django.contrib.sites.middleware import CurrentSiteMiddleware +from django.contrib.sites.models import Site, clear_site_cache +from django.contrib.sites.requests import RequestSite +from django.contrib.sites.shortcuts import get_current_site +from django.core import checks +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.db.models.signals import post_migrate +from django.http import HttpRequest, HttpResponse +from django.test import SimpleTestCase, TestCase, modify_settings, override_settings +from django.test.utils import captured_stdout + + +@modify_settings(INSTALLED_APPS={"append": "django.contrib.sites"}) +class SitesFrameworkTests(TestCase): + databases = {"default", "other"} + + @classmethod + def setUpTestData(cls): + cls.site = Site(id=settings.SITE_ID, domain="example.com", name="example.com") + cls.site.save() + + def setUp(self): + Site.objects.clear_cache() + + def tearDown(self): + Site.objects.clear_cache() + + def test_site_manager(self): + # Make sure that get_current() does not return a deleted Site object. 
+ s = Site.objects.get_current() + self.assertIsInstance(s, Site) + s.delete() + with self.assertRaises(ObjectDoesNotExist): + Site.objects.get_current() + + def test_site_cache(self): + # After updating a Site object (e.g. via the admin), we shouldn't return a + # bogus value from the SITE_CACHE. + site = Site.objects.get_current() + self.assertEqual("example.com", site.name) + s2 = Site.objects.get(id=settings.SITE_ID) + s2.name = "Example site" + s2.save() + site = Site.objects.get_current() + self.assertEqual("Example site", site.name) + + def test_delete_all_sites_clears_cache(self): + # When all site objects are deleted the cache should also + # be cleared and get_current() should raise a DoesNotExist. + self.assertIsInstance(Site.objects.get_current(), Site) + Site.objects.all().delete() + with self.assertRaises(Site.DoesNotExist): + Site.objects.get_current() + + @override_settings(ALLOWED_HOSTS=["example.com"]) + def test_get_current_site(self): + # The correct Site object is returned + request = HttpRequest() + request.META = { + "SERVER_NAME": "example.com", + "SERVER_PORT": "80", + } + site = get_current_site(request) + self.assertIsInstance(site, Site) + self.assertEqual(site.id, settings.SITE_ID) + + # An exception is raised if the sites framework is installed + # but there is no matching Site + site.delete() + with self.assertRaises(ObjectDoesNotExist): + get_current_site(request) + + # A RequestSite is returned if the sites framework is not installed + with self.modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"}): + site = get_current_site(request) + self.assertIsInstance(site, RequestSite) + self.assertEqual(site.name, "example.com") + + @override_settings(SITE_ID=None, ALLOWED_HOSTS=["example.com"]) + def test_get_current_site_no_site_id(self): + request = HttpRequest() + request.META = { + "SERVER_NAME": "example.com", + "SERVER_PORT": "80", + } + del settings.SITE_ID + site = get_current_site(request) + self.assertEqual(site.name, 
"example.com") + + @override_settings(SITE_ID=None, ALLOWED_HOSTS=["example.com"]) + def test_get_current_site_host_with_trailing_dot(self): + """ + The site is matched if the name in the request has a trailing dot. + """ + request = HttpRequest() + request.META = { + "SERVER_NAME": "example.com.", + "SERVER_PORT": "80", + } + site = get_current_site(request) + self.assertEqual(site.name, "example.com") + + @override_settings(SITE_ID=None, ALLOWED_HOSTS=["example.com", "example.net"]) + def test_get_current_site_no_site_id_and_handle_port_fallback(self): + request = HttpRequest() + s1 = self.site + s2 = Site.objects.create(domain="example.com:80", name="example.com:80") + + # Host header without port + request.META = {"HTTP_HOST": "example.com"} + site = get_current_site(request) + self.assertEqual(site, s1) + + # Host header with port - match, no fallback without port + request.META = {"HTTP_HOST": "example.com:80"} + site = get_current_site(request) + self.assertEqual(site, s2) + + # Host header with port - no match, fallback without port + request.META = {"HTTP_HOST": "example.com:81"} + site = get_current_site(request) + self.assertEqual(site, s1) + + # Host header with non-matching domain + request.META = {"HTTP_HOST": "example.net"} + with self.assertRaises(ObjectDoesNotExist): + get_current_site(request) + + # Ensure domain for RequestSite always matches host header + with self.modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"}): + request.META = {"HTTP_HOST": "example.com"} + site = get_current_site(request) + self.assertEqual(site.name, "example.com") + + request.META = {"HTTP_HOST": "example.com:80"} + site = get_current_site(request) + self.assertEqual(site.name, "example.com:80") + + def test_domain_name_with_whitespaces(self): + # Regression for #17320 + # Domain names are not allowed contain whitespace characters + site = Site(name="test name", domain="test test") + with self.assertRaises(ValidationError): + site.full_clean() + 
site.domain = "test\ttest" + with self.assertRaises(ValidationError): + site.full_clean() + site.domain = "test\ntest" + with self.assertRaises(ValidationError): + site.full_clean() + + @override_settings(ALLOWED_HOSTS=["example.com"]) + def test_clear_site_cache(self): + request = HttpRequest() + request.META = { + "SERVER_NAME": "example.com", + "SERVER_PORT": "80", + } + self.assertEqual(models.SITE_CACHE, {}) + get_current_site(request) + expected_cache = {self.site.id: self.site} + self.assertEqual(models.SITE_CACHE, expected_cache) + + with self.settings(SITE_ID=None): + get_current_site(request) + + expected_cache.update({self.site.domain: self.site}) + self.assertEqual(models.SITE_CACHE, expected_cache) + + clear_site_cache(Site, instance=self.site, using="default") + self.assertEqual(models.SITE_CACHE, {}) + + @override_settings(SITE_ID=None, ALLOWED_HOSTS=["example2.com"]) + def test_clear_site_cache_domain(self): + site = Site.objects.create(name="example2.com", domain="example2.com") + request = HttpRequest() + request.META = { + "SERVER_NAME": "example2.com", + "SERVER_PORT": "80", + } + get_current_site(request) # prime the models.SITE_CACHE + expected_cache = {site.domain: site} + self.assertEqual(models.SITE_CACHE, expected_cache) + + # Site exists in 'default' database so using='other' shouldn't clear. + clear_site_cache(Site, instance=site, using="other") + self.assertEqual(models.SITE_CACHE, expected_cache) + # using='default' should clear. + clear_site_cache(Site, instance=site, using="default") + self.assertEqual(models.SITE_CACHE, {}) + + def test_unique_domain(self): + site = Site(domain=self.site.domain) + msg = "Site with this Domain name already exists." 
+ with self.assertRaisesMessage(ValidationError, msg): + site.validate_unique() + + def test_site_natural_key(self): + self.assertEqual(Site.objects.get_by_natural_key(self.site.domain), self.site) + self.assertEqual(self.site.natural_key(), (self.site.domain,)) + + @override_settings(SITE_ID="1") + def test_check_site_id(self): + self.assertEqual( + check_site_id(None), + [ + checks.Error( + msg="The SITE_ID setting must be an integer", + id="sites.E101", + ), + ], + ) + + def test_valid_site_id(self): + for site_id in [1, None]: + with self.subTest(site_id=site_id), self.settings(SITE_ID=site_id): + self.assertEqual(check_site_id(None), []) + + +@override_settings(ALLOWED_HOSTS=["example.com"]) +class RequestSiteTests(SimpleTestCase): + def setUp(self): + request = HttpRequest() + request.META = {"HTTP_HOST": "example.com"} + self.site = RequestSite(request) + + def test_init_attributes(self): + self.assertEqual(self.site.domain, "example.com") + self.assertEqual(self.site.name, "example.com") + + def test_str(self): + self.assertEqual(str(self.site), "example.com") + + def test_save(self): + msg = "RequestSite cannot be saved." + with self.assertRaisesMessage(NotImplementedError, msg): + self.site.save() + + def test_delete(self): + msg = "RequestSite cannot be deleted." + with self.assertRaisesMessage(NotImplementedError, msg): + self.site.delete() + + +class JustOtherRouter: + def allow_migrate(self, db, app_label, **hints): + return db == "other" + + +@modify_settings(INSTALLED_APPS={"append": "django.contrib.sites"}) +class CreateDefaultSiteTests(TestCase): + databases = {"default", "other"} + + @classmethod + def setUpTestData(cls): + # Delete the site created as part of the default migration process. + Site.objects.all().delete() + + def setUp(self): + self.app_config = apps.get_app_config("sites") + + def test_basic(self): + """ + #15346, #15573 - create_default_site() creates an example site only if + none exist. 
+ """ + with captured_stdout() as stdout: + create_default_site(self.app_config) + self.assertEqual(Site.objects.count(), 1) + self.assertIn("Creating example.com", stdout.getvalue()) + + with captured_stdout() as stdout: + create_default_site(self.app_config) + self.assertEqual(Site.objects.count(), 1) + self.assertEqual("", stdout.getvalue()) + + @override_settings(DATABASE_ROUTERS=[JustOtherRouter()]) + def test_multi_db_with_router(self): + """ + #16353, #16828 - The default site creation should respect db routing. + """ + create_default_site(self.app_config, using="default", verbosity=0) + create_default_site(self.app_config, using="other", verbosity=0) + self.assertFalse(Site.objects.using("default").exists()) + self.assertTrue(Site.objects.using("other").exists()) + + def test_multi_db(self): + create_default_site(self.app_config, using="default", verbosity=0) + create_default_site(self.app_config, using="other", verbosity=0) + self.assertTrue(Site.objects.using("default").exists()) + self.assertTrue(Site.objects.using("other").exists()) + + def test_save_another(self): + """ + #17415 - Another site can be created right after the default one. + + On some backends the sequence needs to be reset after saving with an + explicit ID. There shouldn't be a sequence collisions by saving another + site. This test is only meaningful with databases that use sequences + for automatic primary keys such as PostgreSQL and Oracle. + """ + create_default_site(self.app_config, verbosity=0) + Site(domain="example2.com", name="example2.com").save() + + def test_signal(self): + """ + #23641 - Sending the ``post_migrate`` signal triggers creation of the + default site. + """ + post_migrate.send( + sender=self.app_config, app_config=self.app_config, verbosity=0 + ) + self.assertTrue(Site.objects.exists()) + + @override_settings(SITE_ID=35696) + def test_custom_site_id(self): + """ + #23945 - The configured ``SITE_ID`` should be respected. 
+ """ + create_default_site(self.app_config, verbosity=0) + self.assertEqual(Site.objects.get().pk, 35696) + + @override_settings() # Restore original ``SITE_ID`` afterward. + def test_no_site_id(self): + """ + #24488 - The pk should default to 1 if no ``SITE_ID`` is configured. + """ + del settings.SITE_ID + create_default_site(self.app_config, verbosity=0) + self.assertEqual(Site.objects.get().pk, 1) + + def test_unavailable_site_model(self): + """ + #24075 - A Site shouldn't be created if the model isn't available. + """ + apps = Apps() + create_default_site(self.app_config, verbosity=0, apps=apps) + self.assertFalse(Site.objects.exists()) + + +class MiddlewareTest(TestCase): + def test_request(self): + def get_response(request): + return HttpResponse(str(request.site.id)) + + response = CurrentSiteMiddleware(get_response)(HttpRequest()) + self.assertContains(response, settings.SITE_ID) diff --git a/testbed/django__django/tests/staticfiles_tests/__init__.py b/testbed/django__django/tests/staticfiles_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/staticfiles_tests/apps/__init__.py b/testbed/django__django/tests/staticfiles_tests/apps/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/staticfiles_tests/apps/no_label/__init__.py b/testbed/django__django/tests/staticfiles_tests/apps/no_label/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/staticfiles_tests/apps/no_label/static/file2.txt b/testbed/django__django/tests/staticfiles_tests/apps/no_label/static/file2.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa264cab9bb3d06e46a7124abad4df7602df692b --- /dev/null +++ 
b/testbed/django__django/tests/staticfiles_tests/apps/no_label/static/file2.txt @@ -0,0 +1 @@ +file2 in no_label_app diff --git a/testbed/django__django/tests/staticfiles_tests/apps/staticfiles_config.py b/testbed/django__django/tests/staticfiles_tests/apps/staticfiles_config.py new file mode 100644 index 0000000000000000000000000000000000000000..cf2147f1fa6549b68788a8e944a3491f6c1b713f --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/staticfiles_config.py @@ -0,0 +1,5 @@ +from django.contrib.staticfiles.apps import StaticFilesConfig + + +class IgnorePatternsAppConfig(StaticFilesConfig): + ignore_patterns = ["*.css", "*/vendor/*.js"] diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/__init__.py b/testbed/django__django/tests/staticfiles_tests/apps/test/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/otherdir/odfile.txt b/testbed/django__django/tests/staticfiles_tests/apps/test/otherdir/odfile.txt new file mode 100644 index 0000000000000000000000000000000000000000..c62c93d1622a53443cc4ca71263845c8418213d2 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/otherdir/odfile.txt @@ -0,0 +1 @@ +File in otherdir. 
diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/%2F.txt b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/%2F.txt new file mode 100644 index 0000000000000000000000000000000000000000..d98b646c7c665852f7be3457aec7b0ef64e614bb --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/%2F.txt @@ -0,0 +1 @@ +%2F content diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/.hidden b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/.hidden new file mode 100644 index 0000000000000000000000000000000000000000..a4de8e4b518518e78dca160cc85b5930b1c55c0b --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/.hidden @@ -0,0 +1 @@ +should be ignored diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/CVS b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/CVS new file mode 100644 index 0000000000000000000000000000000000000000..cef6c23575aa0c0d3190fd54c1250b166689740f --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/CVS @@ -0,0 +1 @@ +This file should be ignored. diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/file.txt b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/file.txt new file mode 100644 index 0000000000000000000000000000000000000000..f4754f7310d2faec993a06d70fb56f749386e47d --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/file.txt @@ -0,0 +1 @@ +In static directory. 
diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/file1.txt b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/file1.txt new file mode 100644 index 0000000000000000000000000000000000000000..9f9a8d92abd3d8e2a9b89eb6285ba67b99a244c3 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/file1.txt @@ -0,0 +1 @@ +file1 in the app dir \ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/nonascii.css b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/nonascii.css new file mode 100644 index 0000000000000000000000000000000000000000..a5358f6edeaa3170fe8b90b99462d202dc45ff3e --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/nonascii.css @@ -0,0 +1,5 @@ +body { + background: url('window.png'); +} + +.snowman:before { content: "☃"; } diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/test.ignoreme b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/test.ignoreme new file mode 100644 index 0000000000000000000000000000000000000000..cef6c23575aa0c0d3190fd54c1250b166689740f --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/test.ignoreme @@ -0,0 +1 @@ +This file should be ignored. 
diff --git a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/vendor/module.js b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/vendor/module.js new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git "a/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/\342\212\227.txt" "b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/\342\212\227.txt" new file mode 100644 index 0000000000000000000000000000000000000000..598ebdd978ec9b37ade5e03ec495f8a757fafc2a --- /dev/null +++ "b/testbed/django__django/tests/staticfiles_tests/apps/test/static/test/\342\212\227.txt" @@ -0,0 +1 @@ +⊗ in the app dir diff --git a/testbed/django__django/tests/staticfiles_tests/cases.py b/testbed/django__django/tests/staticfiles_tests/cases.py new file mode 100644 index 0000000000000000000000000000000000000000..5e8b150b331cf06c7c4762cc349b521577bd7723 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/cases.py @@ -0,0 +1,148 @@ +import os +import shutil +import tempfile + +from django.conf import settings +from django.core.management import call_command +from django.template import Context, Template +from django.test import SimpleTestCase, override_settings + +from .settings import TEST_SETTINGS + + +class BaseStaticFilesMixin: + """ + Test case with a couple utility assertions. 
+ """ + + def assertFileContains(self, filepath, text): + self.assertIn( + text, + self._get_file(filepath), + "'%s' not in '%s'" % (text, filepath), + ) + + def assertFileNotFound(self, filepath): + with self.assertRaises(OSError): + self._get_file(filepath) + + def render_template(self, template, **kwargs): + if isinstance(template, str): + template = Template(template) + return template.render(Context(**kwargs)).strip() + + def static_template_snippet(self, path, asvar=False): + if asvar: + return ( + "{%% load static from static %%}{%% static '%s' as var %%}{{ var }}" + % path + ) + return "{%% load static from static %%}{%% static '%s' %%}" % path + + def assertStaticRenders(self, path, result, asvar=False, **kwargs): + template = self.static_template_snippet(path, asvar) + self.assertEqual(self.render_template(template, **kwargs), result) + + def assertStaticRaises(self, exc, path, result, asvar=False, **kwargs): + with self.assertRaises(exc): + self.assertStaticRenders(path, result, **kwargs) + + +@override_settings(**TEST_SETTINGS) +class StaticFilesTestCase(BaseStaticFilesMixin, SimpleTestCase): + pass + + +@override_settings(**TEST_SETTINGS) +class CollectionTestCase(BaseStaticFilesMixin, SimpleTestCase): + """ + Tests shared by all file finding features (collectstatic, + findstatic, and static serve view). + + This relies on the asserts defined in BaseStaticFilesTestCase, but + is separated because some test cases need those asserts without + all these tests. + """ + + run_collectstatic_in_setUp = True + + def setUp(self): + super().setUp() + temp_dir = self.mkdtemp() + # Override the STATIC_ROOT for all tests from setUp to tearDown + # rather than as a context manager + self.patched_settings = self.settings(STATIC_ROOT=temp_dir) + self.patched_settings.enable() + if self.run_collectstatic_in_setUp: + self.run_collectstatic() + # Same comment as in runtests.teardown. 
+ self.addCleanup(shutil.rmtree, temp_dir) + + def tearDown(self): + self.patched_settings.disable() + super().tearDown() + + def mkdtemp(self): + return tempfile.mkdtemp() + + def run_collectstatic(self, *, verbosity=0, **kwargs): + call_command( + "collectstatic", + interactive=False, + verbosity=verbosity, + ignore_patterns=["*.ignoreme"], + **kwargs, + ) + + def _get_file(self, filepath): + assert filepath, "filepath is empty." + filepath = os.path.join(settings.STATIC_ROOT, filepath) + with open(filepath, encoding="utf-8") as f: + return f.read() + + +class TestDefaults: + """ + A few standard test cases. + """ + + def test_staticfiles_dirs(self): + """ + Can find a file in a STATICFILES_DIRS directory. + """ + self.assertFileContains("test.txt", "Can we find") + self.assertFileContains(os.path.join("prefix", "test.txt"), "Prefix") + + def test_staticfiles_dirs_subdir(self): + """ + Can find a file in a subdirectory of a STATICFILES_DIRS + directory. + """ + self.assertFileContains("subdir/test.txt", "Can we find") + + def test_staticfiles_dirs_priority(self): + """ + File in STATICFILES_DIRS has priority over file in app. + """ + self.assertFileContains("test/file.txt", "STATICFILES_DIRS") + + def test_app_files(self): + """ + Can find a file in an app static/ directory. + """ + self.assertFileContains("test/file1.txt", "file1 in the app dir") + + def test_nonascii_filenames(self): + """ + Can find a file with non-ASCII character in an app static/ directory. + """ + self.assertFileContains("test/⊗.txt", "⊗ in the app dir") + + def test_camelcase_filenames(self): + """ + Can find a file with capital letters. 
+ """ + self.assertFileContains("test/camelCase.txt", "camelCase") + + def test_filename_with_percent_sign(self): + self.assertFileContains("test/%2F.txt", "%2F content") diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/absolute_root.css b/testbed/django__django/tests/staticfiles_tests/project/documents/absolute_root.css new file mode 100644 index 0000000000000000000000000000000000000000..b35ff47c9e26d392969c30e22f1e020181412545 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/absolute_root.css @@ -0,0 +1 @@ +@import url("/static/styles_root.css"); diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/absolute_root.js b/testbed/django__django/tests/staticfiles_tests/project/documents/absolute_root.js new file mode 100644 index 0000000000000000000000000000000000000000..4561b0389b725279dd68dba9d0db5bbee2d8f103 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/absolute_root.js @@ -0,0 +1,2 @@ +const rootConst = "root"; +export default rootConst; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/absolute.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/absolute.css new file mode 100644 index 0000000000000000000000000000000000000000..6a2040b413eb1ef1ede0e289357792a48f894fb9 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/absolute.css @@ -0,0 +1,5 @@ +@import url("/static/cached/styles.css"); +@import url("/static/styles_root.css"); +body { + background: #d3d6d8 url(/static/cached/img/relative.png); +} diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fonts/font.eot b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fonts/font.eot new file mode 100644 index 0000000000000000000000000000000000000000..fdd7138c52ba71534ab1c6d513e56dd40ea8bccd --- /dev/null +++ 
b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fonts/font.eot @@ -0,0 +1 @@ +not really an EOT ;) \ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fonts/font.svg b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fonts/font.svg new file mode 100644 index 0000000000000000000000000000000000000000..028237591530971e3adb0f9fddafeedb65a8dbe6 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fonts/font.svg @@ -0,0 +1 @@ +not really a SVG ;) \ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fragments.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fragments.css new file mode 100644 index 0000000000000000000000000000000000000000..533d7617aae1b9d2b61d4f866f6fa8973b480dfa --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/fragments.css @@ -0,0 +1,9 @@ +@font-face { + src: url('fonts/font.eot?#iefix') format('embedded-opentype'), + url('fonts/font.svg#webfontIyfZbseF') format('svg'); + url('fonts/font.svg#path/to/../../fonts/font.svg') format('svg'); + url('data:font/woff;charset=utf-8;base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA'); +} +div { + behavior: url("#default#VML"); +} diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/ignored.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/ignored.css new file mode 100644 index 0000000000000000000000000000000000000000..70a8cb918a5d3f4ea12b8b3deff3ffcb7402534a --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/ignored.css @@ -0,0 +1,10 @@ +body { + background: url("#foobar"); + background: url("http:foobar"); + background: url("https:foobar"); + background: url("data:foobar"); + background: 
url("chrome:foobar"); + background: url("//foobar"); + background: url(); +} + diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/window.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/window.css new file mode 100644 index 0000000000000000000000000000000000000000..770e4001e62cb63100333e4658a134bbd21240c1 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/css/window.css @@ -0,0 +1,3 @@ +body { + background: #d3d6d8 url("img/window.png"); +} diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/import.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/import.css new file mode 100644 index 0000000000000000000000000000000000000000..6bc7ce04c48bfa036a425e9f62110ef5b48d65e0 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/import.css @@ -0,0 +1 @@ +@import 'styles.css'; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/module.js b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/module.js new file mode 100644 index 0000000000000000000000000000000000000000..7764e740d6973298c2e40d10d02c9f76a78da6a9 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/module.js @@ -0,0 +1,22 @@ +// Static imports. +import rootConst from "/static/absolute_root.js"; +import testConst from "./module_test.js"; +import * as NewModule from "./module_test.js"; +import { testConst as alias } from "./module_test.js"; +import { firstConst, secondConst } from "./module_test.js"; +import { + firstVar1 as firstVarAlias, + $second_var_2 as secondVarAlias +} from "./module_test.js"; +import relativeModule from "../nested/js/nested.js"; + +// Dynamic imports. +const dynamicModule = import("./module_test.js"); + +// Modules exports to aggregate modules. 
+export * from "./module_test.js"; +export { testConst } from "./module_test.js"; +export { + firstVar as firstVarAlias, + secondVar as secondVarAlias +} from "./module_test.js"; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/module_test.js b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/module_test.js new file mode 100644 index 0000000000000000000000000000000000000000..219372f891f8848599e5621eb1ce129265e1a35c --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/module_test.js @@ -0,0 +1,5 @@ +export const testConst = "test"; +export const firstConst = "first"; +export const secondConst = "second"; +export var firstVar1 = "test_1"; +export var SecondVar2 = "test_2"; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/other.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/other.css new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/relative.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/relative.css new file mode 100644 index 0000000000000000000000000000000000000000..68995c4feddd2058b4475de542a49dd0a5f6d9ef --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/relative.css @@ -0,0 +1,6 @@ +@import url("../cached/styles.css"); +@import url("absolute.css"); +@import url("absolute.css#eggs"); +body { + background: #d3d6d8 url(img/relative.png); +} diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.css new file mode 100644 index 0000000000000000000000000000000000000000..a07d612403c1fda1e9347c97db64294a84b173bd --- /dev/null +++ 
b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.css @@ -0,0 +1,2 @@ +* {outline: 1px solid red;} +/*# sourceMappingURL=source_map.css.map*/ diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.css.map b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.css.map new file mode 100644 index 0000000000000000000000000000000000000000..9e26dfeeb6e641a33dae4961196235bdb965b21b --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.css.map @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.js b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.js new file mode 100644 index 0000000000000000000000000000000000000000..9d417868a087ba0bd79700ca997c2019840c0713 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.js @@ -0,0 +1,2 @@ +//# sourceMappingURL=source_map.js.map +let a_variable = 1; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.js.map b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.js.map new file mode 100644 index 0000000000000000000000000000000000000000..9e26dfeeb6e641a33dae4961196235bdb965b21b --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map.js.map @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_data_uri.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_data_uri.css new file mode 100644 index 0000000000000000000000000000000000000000..7b6bc63241141e7ed9798550be24bdf34ae50614 --- /dev/null +++ 
b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_data_uri.css @@ -0,0 +1,2 @@ +* {outline: 1px solid red;} +/*# sourceMappingURL=data:application/json;charset=utf8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIl9zcmMv*/ diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_data_uri.js b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_data_uri.js new file mode 100644 index 0000000000000000000000000000000000000000..316fb1fecd42c6993d8d4cb7454febaaadc5815a --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_data_uri.js @@ -0,0 +1,2 @@ +//# sourceMappingURL=data:application/json;charset=utf8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIl9zcmMv +let a_variable = 1; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_sensitive.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_sensitive.css new file mode 100644 index 0000000000000000000000000000000000000000..a1b23bb316d85eb8439df44ed201bca144c7c271 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_sensitive.css @@ -0,0 +1,2 @@ +* {outline: 1px solid red;} +/*# sOuRcEMaPpInGURL=source_map.css.map */ diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_sensitive.js b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_sensitive.js new file mode 100644 index 0000000000000000000000000000000000000000..d60c76b4f6ae3db36becbe52008c0dec753d374e --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_sensitive.js @@ -0,0 +1 @@ +//# sOuRcEMaPpInGURL=source_map.js.map diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_tabs.css 
b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_tabs.css new file mode 100644 index 0000000000000000000000000000000000000000..3ae2073dc8316493e11b4c4287596c723490c5f2 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_tabs.css @@ -0,0 +1,2 @@ +* {outline: 1px solid red;} +/*# sourceMappingURL=source_map.css.map */ diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_trailing_whitespace.js b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_trailing_whitespace.js new file mode 100644 index 0000000000000000000000000000000000000000..e15bc402a953b9d71f9b4cf640e230e1b415fd0b --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/source_map_trailing_whitespace.js @@ -0,0 +1,2 @@ +//# sourceMappingURL=source_map.js.map +let a_variable = 1; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/styles.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/styles.css new file mode 100644 index 0000000000000000000000000000000000000000..68896f4973ac63549139507c9637494e0e2301e4 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/styles.css @@ -0,0 +1 @@ +@import url("other.css"); diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/styles_insensitive.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/styles_insensitive.css new file mode 100644 index 0000000000000000000000000000000000000000..3cd3d0834913856a78243559ac8bdc2316eed8a1 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/styles_insensitive.css @@ -0,0 +1 @@ +@IMporT uRL("other.css"); diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/test.js 
b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/test.js new file mode 100644 index 0000000000000000000000000000000000000000..3d1c431aff79ebb640d1e628e77bf95eae55dda7 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/test.js @@ -0,0 +1 @@ +myVar = url("import.css"); diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/cached/url.css b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/url.css new file mode 100644 index 0000000000000000000000000000000000000000..c0974475ca88645a27a0b6a29620db69a934a8ab --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/cached/url.css @@ -0,0 +1 @@ +@import url("https://www.djangoproject.com/m/css/base.css"); diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/nested/css/base.css b/testbed/django__django/tests/staticfiles_tests/project/documents/nested/css/base.css new file mode 100644 index 0000000000000000000000000000000000000000..06041ca25f1e58447f1429e5756bd1ac5c314c91 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/nested/css/base.css @@ -0,0 +1 @@ +html {height: 100%;} diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/nested/js/nested.js b/testbed/django__django/tests/staticfiles_tests/project/documents/nested/js/nested.js new file mode 100644 index 0000000000000000000000000000000000000000..7646bbd17d04a03488f955ff0984971bca8f6e20 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/nested/js/nested.js @@ -0,0 +1 @@ +export default null; diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/staticfiles_v1.json b/testbed/django__django/tests/staticfiles_tests/project/documents/staticfiles_v1.json new file mode 100644 index 0000000000000000000000000000000000000000..4f85945e3f2040a45a783076203d803272a36022 --- /dev/null +++ 
b/testbed/django__django/tests/staticfiles_tests/project/documents/staticfiles_v1.json @@ -0,0 +1,6 @@ +{ + "version": "1.0", + "paths": { + "dummy.txt": "dummy.txt" + } +} diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/styles_root.css b/testbed/django__django/tests/staticfiles_tests/project/documents/styles_root.css new file mode 100644 index 0000000000000000000000000000000000000000..64512630cb3ef4ca8b23da06a58b30f8dba39807 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/styles_root.css @@ -0,0 +1 @@ +/* see cached/absolute.css and absolute_root.css */ diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/subdir/test.txt b/testbed/django__django/tests/staticfiles_tests/project/documents/subdir/test.txt new file mode 100644 index 0000000000000000000000000000000000000000..04326a212de72f4d5d38e67509a1c837312b17f7 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/subdir/test.txt @@ -0,0 +1 @@ +Can we find this file? diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/test.txt b/testbed/django__django/tests/staticfiles_tests/project/documents/test.txt new file mode 100644 index 0000000000000000000000000000000000000000..04326a212de72f4d5d38e67509a1c837312b17f7 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/test.txt @@ -0,0 +1 @@ +Can we find this file? 
diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/test/backup~ b/testbed/django__django/tests/staticfiles_tests/project/documents/test/backup~ new file mode 100644 index 0000000000000000000000000000000000000000..a4de8e4b518518e78dca160cc85b5930b1c55c0b --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/test/backup~ @@ -0,0 +1 @@ +should be ignored diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/test/camelCase.txt b/testbed/django__django/tests/staticfiles_tests/project/documents/test/camelCase.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4f8882668e3b008e0403e62841c7b17eb9b957c --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/test/camelCase.txt @@ -0,0 +1 @@ +This file is named with camelCase. \ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/project/documents/test/file.txt b/testbed/django__django/tests/staticfiles_tests/project/documents/test/file.txt new file mode 100644 index 0000000000000000000000000000000000000000..fdeaa23254e480e99df7dc61aaf9bba442a69abd --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/documents/test/file.txt @@ -0,0 +1,2 @@ +In STATICFILES_DIRS directory. 
+ diff --git a/testbed/django__django/tests/staticfiles_tests/project/faulty/faulty.css b/testbed/django__django/tests/staticfiles_tests/project/faulty/faulty.css new file mode 100644 index 0000000000000000000000000000000000000000..ca57c77e55132d097d45fbe70d568128c1386bbd --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/faulty/faulty.css @@ -0,0 +1 @@ +@import url("missing.css"); diff --git a/testbed/django__django/tests/staticfiles_tests/project/loop/bar.css b/testbed/django__django/tests/staticfiles_tests/project/loop/bar.css new file mode 100644 index 0000000000000000000000000000000000000000..1f11a22cbf8e312c4fea59c5ce22aa8a4e279f20 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/loop/bar.css @@ -0,0 +1 @@ +@import url("foo.css") diff --git a/testbed/django__django/tests/staticfiles_tests/project/loop/foo.css b/testbed/django__django/tests/staticfiles_tests/project/loop/foo.css new file mode 100644 index 0000000000000000000000000000000000000000..0903a708027dd3786ebd6f238133d58b1de5b16a --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/loop/foo.css @@ -0,0 +1 @@ +@import url("bar.css") diff --git a/testbed/django__django/tests/staticfiles_tests/project/nonutf8/nonutf8.css b/testbed/django__django/tests/staticfiles_tests/project/nonutf8/nonutf8.css new file mode 100644 index 0000000000000000000000000000000000000000..4d5e729088337430646d4cf5b6ef4854c092db6d --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/nonutf8/nonutf8.css @@ -0,0 +1,2 @@ +/* viter crasement */ +.test { margin: 1 rem; } diff --git a/testbed/django__django/tests/staticfiles_tests/project/pathlib/pathlib.txt b/testbed/django__django/tests/staticfiles_tests/project/pathlib/pathlib.txt new file mode 100644 index 0000000000000000000000000000000000000000..c7709d3d4111575ae8f6b3f1c4a515f3ba7f307f --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/pathlib/pathlib.txt @@ -0,0 +1 
@@ +pathlib diff --git a/testbed/django__django/tests/staticfiles_tests/project/prefixed/test.txt b/testbed/django__django/tests/staticfiles_tests/project/prefixed/test.txt new file mode 100644 index 0000000000000000000000000000000000000000..d3a017895ce66c9c8c2f009c4c50bba5419a03be --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/prefixed/test.txt @@ -0,0 +1 @@ +Prefix! \ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/project/site_media/media/media-file.txt b/testbed/django__django/tests/staticfiles_tests/project/site_media/media/media-file.txt new file mode 100644 index 0000000000000000000000000000000000000000..466922d07ae45daae4f69b0d6af9a679ff6259ce --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/site_media/media/media-file.txt @@ -0,0 +1 @@ +Media file. diff --git a/testbed/django__django/tests/staticfiles_tests/project/site_media/static/testfile.txt b/testbed/django__django/tests/staticfiles_tests/project/site_media/static/testfile.txt new file mode 100644 index 0000000000000000000000000000000000000000..4d92dbe1adbd164271a00aa96a93dadc9b674aac --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/project/site_media/static/testfile.txt @@ -0,0 +1 @@ +Test! 
\ No newline at end of file diff --git a/testbed/django__django/tests/staticfiles_tests/settings.py b/testbed/django__django/tests/staticfiles_tests/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..a5fd116c0addf20e6d55798e4ecb37cdd1ee9f7a --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/settings.py @@ -0,0 +1,30 @@ +import os.path +from pathlib import Path + +TEST_ROOT = os.path.dirname(__file__) + +TEST_SETTINGS = { + "MEDIA_URL": "media/", + "STATIC_URL": "static/", + "MEDIA_ROOT": os.path.join(TEST_ROOT, "project", "site_media", "media"), + "STATIC_ROOT": os.path.join(TEST_ROOT, "project", "site_media", "static"), + "STATICFILES_DIRS": [ + os.path.join(TEST_ROOT, "project", "documents"), + ("prefix", os.path.join(TEST_ROOT, "project", "prefixed")), + Path(TEST_ROOT) / "project" / "pathlib", + ], + "STATICFILES_FINDERS": [ + "django.contrib.staticfiles.finders.FileSystemFinder", + "django.contrib.staticfiles.finders.AppDirectoriesFinder", + "django.contrib.staticfiles.finders.DefaultStorageFinder", + ], + "INSTALLED_APPS": [ + "django.contrib.staticfiles", + "staticfiles_tests", + "staticfiles_tests.apps.test", + "staticfiles_tests.apps.no_label", + ], + # In particular, AuthenticationMiddleware can't be used because + # contrib.auth isn't in INSTALLED_APPS. 
+ "MIDDLEWARE": [], +} diff --git a/testbed/django__django/tests/staticfiles_tests/storage.py b/testbed/django__django/tests/staticfiles_tests/storage.py new file mode 100644 index 0000000000000000000000000000000000000000..51614fbb18d04460304505371d742869f6ab7827 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/storage.py @@ -0,0 +1,104 @@ +import os +from datetime import datetime, timedelta, timezone + +from django.conf import settings +from django.contrib.staticfiles.storage import ManifestStaticFilesStorage +from django.core.files import storage + + +class DummyStorage(storage.Storage): + """ + A storage class that implements get_modified_time() but raises + NotImplementedError for path(). + """ + + def _save(self, name, content): + return "dummy" + + def delete(self, name): + pass + + def exists(self, name): + pass + + def get_modified_time(self, name): + return datetime(1970, 1, 1, tzinfo=timezone.utc) + + +class PathNotImplementedStorage(storage.Storage): + def _save(self, name, content): + return "dummy" + + def _path(self, name): + return os.path.join(settings.STATIC_ROOT, name) + + def exists(self, name): + return os.path.exists(self._path(name)) + + def listdir(self, path): + path = self._path(path) + directories, files = [], [] + with os.scandir(path) as entries: + for entry in entries: + if entry.is_dir(): + directories.append(entry.name) + else: + files.append(entry.name) + return directories, files + + def delete(self, name): + name = self._path(name) + try: + os.remove(name) + except FileNotFoundError: + pass + + def path(self, name): + raise NotImplementedError + + +class NeverCopyRemoteStorage(PathNotImplementedStorage): + """ + Return a future modified time for all files so that nothing is collected. 
+ """ + + def get_modified_time(self, name): + return datetime.now() + timedelta(days=30) + + +class QueryStringStorage(storage.Storage): + def url(self, path): + return path + "?a=b&c=d" + + +class SimpleStorage(ManifestStaticFilesStorage): + def file_hash(self, name, content=None): + return "deploy12345" + + +class ExtraPatternsStorage(ManifestStaticFilesStorage): + """ + A storage class to test pattern substitutions with more than one pattern + entry. The added pattern rewrites strings like "url(...)" to JS_URL("..."). + """ + + patterns = tuple(ManifestStaticFilesStorage.patterns) + ( + ( + "*.js", + ( + ( + r"""(?Purl\(['"]{0,1}\s*(?P.*?)["']{0,1}\))""", + 'JS_URL("%(url)s")', + ), + ), + ), + ) + + +class NoneHashStorage(ManifestStaticFilesStorage): + def file_hash(self, name, content=None): + return None + + +class NoPostProcessReplacedPathStorage(ManifestStaticFilesStorage): + max_post_process_passes = 0 diff --git a/testbed/django__django/tests/staticfiles_tests/test_checks.py b/testbed/django__django/tests/staticfiles_tests/test_checks.py new file mode 100644 index 0000000000000000000000000000000000000000..a8c6b78a96f35557195ca56e9b04973de969e072 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_checks.py @@ -0,0 +1,134 @@ +from pathlib import Path +from unittest import mock + +from django.conf import settings +from django.contrib.staticfiles.checks import check_finders +from django.contrib.staticfiles.finders import BaseFinder, get_finder +from django.core.checks import Error, Warning +from django.test import override_settings + +from .cases import CollectionTestCase +from .settings import TEST_ROOT + + +class FindersCheckTests(CollectionTestCase): + run_collectstatic_in_setUp = False + + def test_base_finder_check_not_implemented(self): + finder = BaseFinder() + msg = ( + "subclasses may provide a check() method to verify the finder is " + "configured correctly." 
+ ) + with self.assertRaisesMessage(NotImplementedError, msg): + finder.check() + + def test_check_finders(self): + """check_finders() concatenates all errors.""" + error1 = Error("1") + error2 = Error("2") + error3 = Error("3") + + def get_finders(): + class Finder1(BaseFinder): + def check(self, **kwargs): + return [error1] + + class Finder2(BaseFinder): + def check(self, **kwargs): + return [] + + class Finder3(BaseFinder): + def check(self, **kwargs): + return [error2, error3] + + class Finder4(BaseFinder): + pass + + return [Finder1(), Finder2(), Finder3(), Finder4()] + + with mock.patch("django.contrib.staticfiles.checks.get_finders", get_finders): + errors = check_finders(None) + self.assertEqual(errors, [error1, error2, error3]) + + def test_no_errors_with_test_settings(self): + self.assertEqual(check_finders(None), []) + + @override_settings(STATICFILES_DIRS="a string") + def test_dirs_not_tuple_or_list(self): + self.assertEqual( + check_finders(None), + [ + Error( + "The STATICFILES_DIRS setting is not a tuple or list.", + hint="Perhaps you forgot a trailing comma?", + id="staticfiles.E001", + ) + ], + ) + + def test_dirs_contains_static_root(self): + with self.settings(STATICFILES_DIRS=[settings.STATIC_ROOT]): + self.assertEqual( + check_finders(None), + [ + Error( + "The STATICFILES_DIRS setting should not contain the " + "STATIC_ROOT setting.", + id="staticfiles.E002", + ) + ], + ) + + def test_dirs_contains_static_root_in_tuple(self): + with self.settings(STATICFILES_DIRS=[("prefix", settings.STATIC_ROOT)]): + self.assertEqual( + check_finders(None), + [ + Error( + "The STATICFILES_DIRS setting should not contain the " + "STATIC_ROOT setting.", + id="staticfiles.E002", + ) + ], + ) + + def test_prefix_contains_trailing_slash(self): + static_dir = Path(TEST_ROOT) / "project" / "documents" + with self.settings(STATICFILES_DIRS=[("prefix/", static_dir)]): + self.assertEqual( + check_finders(None), + [ + Error( + "The prefix 'prefix/' in the 
STATICFILES_DIRS setting must " + "not end with a slash.", + id="staticfiles.E003", + ), + ], + ) + + def test_nonexistent_directories(self): + with self.settings( + STATICFILES_DIRS=[ + "/fake/path", + ("prefix", "/fake/prefixed/path"), + ] + ): + self.assertEqual( + check_finders(None), + [ + Warning( + "The directory '/fake/path' in the STATICFILES_DIRS " + "setting does not exist.", + id="staticfiles.W004", + ), + Warning( + "The directory '/fake/prefixed/path' in the " + "STATICFILES_DIRS setting does not exist.", + id="staticfiles.W004", + ), + ], + ) + # Nonexistent directories are skipped. + finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder") + self.assertEqual(list(finder.list(None)), []) diff --git a/testbed/django__django/tests/staticfiles_tests/test_finders.py b/testbed/django__django/tests/staticfiles_tests/test_finders.py new file mode 100644 index 0000000000000000000000000000000000000000..9f2509d5334966332b0f0d2c7270cc5ff1592df2 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_finders.py @@ -0,0 +1,125 @@ +import os + +from django.conf import settings +from django.contrib.staticfiles import finders, storage +from django.core.exceptions import ImproperlyConfigured +from django.test import SimpleTestCase, override_settings + +from .cases import StaticFilesTestCase +from .settings import TEST_ROOT + + +class TestFinders: + """ + Base finder test mixin. + + On Windows, sometimes the case of the path we ask the finders for and the + path(s) they find can differ. Compare them using os.path.normcase() to + avoid false negatives. 
+ """ + + def test_find_first(self): + src, dst = self.find_first + found = self.finder.find(src) + self.assertEqual(os.path.normcase(found), os.path.normcase(dst)) + + def test_find_all(self): + src, dst = self.find_all + found = self.finder.find(src, all=True) + found = [os.path.normcase(f) for f in found] + dst = [os.path.normcase(d) for d in dst] + self.assertEqual(found, dst) + + +class TestFileSystemFinder(TestFinders, StaticFilesTestCase): + """ + Test FileSystemFinder. + """ + + def setUp(self): + super().setUp() + self.finder = finders.FileSystemFinder() + test_file_path = os.path.join( + TEST_ROOT, "project", "documents", "test", "file.txt" + ) + self.find_first = (os.path.join("test", "file.txt"), test_file_path) + self.find_all = (os.path.join("test", "file.txt"), [test_file_path]) + + +class TestAppDirectoriesFinder(TestFinders, StaticFilesTestCase): + """ + Test AppDirectoriesFinder. + """ + + def setUp(self): + super().setUp() + self.finder = finders.AppDirectoriesFinder() + test_file_path = os.path.join( + TEST_ROOT, "apps", "test", "static", "test", "file1.txt" + ) + self.find_first = (os.path.join("test", "file1.txt"), test_file_path) + self.find_all = (os.path.join("test", "file1.txt"), [test_file_path]) + + +class TestDefaultStorageFinder(TestFinders, StaticFilesTestCase): + """ + Test DefaultStorageFinder. + """ + + def setUp(self): + super().setUp() + self.finder = finders.DefaultStorageFinder( + storage=storage.StaticFilesStorage(location=settings.MEDIA_ROOT) + ) + test_file_path = os.path.join(settings.MEDIA_ROOT, "media-file.txt") + self.find_first = ("media-file.txt", test_file_path) + self.find_all = ("media-file.txt", [test_file_path]) + + +@override_settings( + STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"], + STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "documents")], +) +class TestMiscFinder(SimpleTestCase): + """ + A few misc finder tests. 
+ """ + + def test_get_finder(self): + self.assertIsInstance( + finders.get_finder("django.contrib.staticfiles.finders.FileSystemFinder"), + finders.FileSystemFinder, + ) + + def test_get_finder_bad_classname(self): + with self.assertRaises(ImportError): + finders.get_finder("django.contrib.staticfiles.finders.FooBarFinder") + + def test_get_finder_bad_module(self): + with self.assertRaises(ImportError): + finders.get_finder("foo.bar.FooBarFinder") + + def test_cache(self): + finders.get_finder.cache_clear() + for n in range(10): + finders.get_finder("django.contrib.staticfiles.finders.FileSystemFinder") + cache_info = finders.get_finder.cache_info() + self.assertEqual(cache_info.hits, 9) + self.assertEqual(cache_info.currsize, 1) + + def test_searched_locations(self): + finders.find("spam") + self.assertEqual( + finders.searched_locations, + [os.path.join(TEST_ROOT, "project", "documents")], + ) + + @override_settings(MEDIA_ROOT="") + def test_location_empty(self): + msg = ( + "The storage backend of the staticfiles finder " + " " + "doesn't have a valid location." 
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + finders.DefaultStorageFinder() diff --git a/testbed/django__django/tests/staticfiles_tests/test_forms.py b/testbed/django__django/tests/staticfiles_tests/test_forms.py new file mode 100644 index 0000000000000000000000000000000000000000..489140a62c17caed86ec2752529473f770022435 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_forms.py @@ -0,0 +1,44 @@ +from urllib.parse import urljoin + +from django.conf import STATICFILES_STORAGE_ALIAS +from django.contrib.staticfiles import storage +from django.forms import Media +from django.templatetags.static import static +from django.test import SimpleTestCase, override_settings + + +class StaticTestStorage(storage.StaticFilesStorage): + def url(self, name): + return urljoin("https://example.com/assets/", name) + + +@override_settings( + INSTALLED_APPS=("django.contrib.staticfiles",), + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.test_forms.StaticTestStorage", + "OPTIONS": {"location": "http://media.example.com/static/"}, + } + }, +) +class StaticFilesFormsMediaTestCase(SimpleTestCase): + def test_absolute_url(self): + m = Media( + css={"all": ("path/to/css1", "/path/to/css2")}, + js=( + "/path/to/js1", + "http://media.other.com/path/to/js2", + "https://secure.other.com/path/to/js3", + static("relative/path/to/js4"), + ), + ) + self.assertEqual( + str(m), + '\n' + '\n' + '\n' + '\n' + '\n' + '', + ) diff --git a/testbed/django__django/tests/staticfiles_tests/test_handlers.py b/testbed/django__django/tests/staticfiles_tests/test_handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..5145d187e1a9f98f2f07e1ab9e816f9eba398aac --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_handlers.py @@ -0,0 +1,41 @@ +from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler +from django.core.handlers.asgi import ASGIHandler +from django.test import 
AsyncRequestFactory + +from .cases import StaticFilesTestCase + + +class MockApplication: + """ASGI application that returns a string indicating that it was called.""" + + async def __call__(self, scope, receive, send): + return "Application called" + + +class TestASGIStaticFilesHandler(StaticFilesTestCase): + async_request_factory = AsyncRequestFactory() + + async def test_get_async_response(self): + request = self.async_request_factory.get("/static/test/file.txt") + handler = ASGIStaticFilesHandler(ASGIHandler()) + response = await handler.get_response_async(request) + response.close() + self.assertEqual(response.status_code, 200) + + async def test_get_async_response_not_found(self): + request = self.async_request_factory.get("/static/test/not-found.txt") + handler = ASGIStaticFilesHandler(ASGIHandler()) + response = await handler.get_response_async(request) + self.assertEqual(response.status_code, 404) + + async def test_non_http_requests_passed_to_the_wrapped_application(self): + tests = [ + "/static/path.txt", + "/non-static/path.txt", + ] + for path in tests: + with self.subTest(path=path): + scope = {"type": "websocket", "path": path} + handler = ASGIStaticFilesHandler(MockApplication()) + response = await handler(scope, None, None) + self.assertEqual(response, "Application called") diff --git a/testbed/django__django/tests/staticfiles_tests/test_liveserver.py b/testbed/django__django/tests/staticfiles_tests/test_liveserver.py new file mode 100644 index 0000000000000000000000000000000000000000..714ebd1a8a587b977c5169c6f5484ab2e1a60e69 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_liveserver.py @@ -0,0 +1,81 @@ +""" +A subset of the tests in tests/servers/tests exercising +django.contrib.staticfiles.testing.StaticLiveServerTestCase instead of +django.test.LiveServerTestCase. 
+""" + +import os +from urllib.request import urlopen + +from django.contrib.staticfiles.testing import StaticLiveServerTestCase +from django.core.exceptions import ImproperlyConfigured +from django.test import modify_settings, override_settings + +TEST_ROOT = os.path.dirname(__file__) +TEST_SETTINGS = { + "MEDIA_URL": "media/", + "STATIC_URL": "static/", + "MEDIA_ROOT": os.path.join(TEST_ROOT, "project", "site_media", "media"), + "STATIC_ROOT": os.path.join(TEST_ROOT, "project", "site_media", "static"), +} + + +class LiveServerBase(StaticLiveServerTestCase): + available_apps = [] + + @classmethod + def setUpClass(cls): + # Override settings + cls.settings_override = override_settings(**TEST_SETTINGS) + cls.settings_override.enable() + cls.addClassCleanup(cls.settings_override.disable) + super().setUpClass() + + +class StaticLiveServerChecks(LiveServerBase): + @classmethod + def setUpClass(cls): + # If contrib.staticfiles isn't configured properly, the exception + # should bubble up to the main thread. + old_STATIC_URL = TEST_SETTINGS["STATIC_URL"] + TEST_SETTINGS["STATIC_URL"] = None + try: + cls.raises_exception() + finally: + TEST_SETTINGS["STATIC_URL"] = old_STATIC_URL + + @classmethod + def tearDownClass(cls): + # skip it, as setUpClass doesn't call its parent either + pass + + @classmethod + def raises_exception(cls): + try: + super().setUpClass() + except ImproperlyConfigured: + # This raises ImproperlyConfigured("You're using the staticfiles + # app without having set the required STATIC_URL setting.") + pass + else: + raise Exception("setUpClass() should have raised an exception.") + + def test_test_test(self): + # Intentionally empty method so that the test is picked up by the + # test runner and the overridden setUpClass() method is executed. + pass + + +class StaticLiveServerView(LiveServerBase): + def urlopen(self, url): + return urlopen(self.live_server_url + url) + + # The test is going to access a static file stored in this application. 
+ @modify_settings(INSTALLED_APPS={"append": "staticfiles_tests.apps.test"}) + def test_collectstatic_emulation(self): + """ + StaticLiveServerTestCase use of staticfiles' serve() allows it + to discover app's static assets without having to collectstatic first. + """ + with self.urlopen("/static/test/file.txt") as f: + self.assertEqual(f.read().rstrip(b"\r\n"), b"In static directory.") diff --git a/testbed/django__django/tests/staticfiles_tests/test_management.py b/testbed/django__django/tests/staticfiles_tests/test_management.py new file mode 100644 index 0000000000000000000000000000000000000000..8398195cec17ac3cb5224e0a1e70298e174d8e23 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_management.py @@ -0,0 +1,654 @@ +import datetime +import os +import shutil +import tempfile +import unittest +from io import StringIO +from pathlib import Path +from unittest import mock + +from admin_scripts.tests import AdminScriptTestCase + +from django.conf import STATICFILES_STORAGE_ALIAS, settings +from django.contrib.staticfiles import storage +from django.contrib.staticfiles.management.commands import collectstatic, runserver +from django.core.exceptions import ImproperlyConfigured +from django.core.management import CommandError, call_command +from django.core.management.base import SystemCheckError +from django.test import RequestFactory, override_settings +from django.test.utils import extend_sys_path +from django.utils._os import symlinks_supported +from django.utils.functional import empty + +from .cases import CollectionTestCase, StaticFilesTestCase, TestDefaults +from .settings import TEST_ROOT, TEST_SETTINGS +from .storage import DummyStorage + + +class TestNoFilesCreated: + def test_no_files_created(self): + """ + Make sure no files were create in the destination directory. 
+ """ + self.assertEqual(os.listdir(settings.STATIC_ROOT), []) + + +class TestRunserver(StaticFilesTestCase): + @override_settings(MIDDLEWARE=["django.middleware.common.CommonMiddleware"]) + def test_middleware_loaded_only_once(self): + command = runserver.Command() + with mock.patch("django.middleware.common.CommonMiddleware") as mocked: + command.get_handler(use_static_handler=True, insecure_serving=True) + self.assertEqual(mocked.call_count, 1) + + def test_404_response(self): + command = runserver.Command() + handler = command.get_handler(use_static_handler=True, insecure_serving=True) + missing_static_file = os.path.join(settings.STATIC_URL, "unknown.css") + req = RequestFactory().get(missing_static_file) + with override_settings(DEBUG=False): + response = handler.get_response(req) + self.assertEqual(response.status_code, 404) + with override_settings(DEBUG=True): + response = handler.get_response(req) + self.assertEqual(response.status_code, 404) + + +class TestFindStatic(TestDefaults, CollectionTestCase): + """ + Test ``findstatic`` management command. + """ + + def _get_file(self, filepath): + path = call_command( + "findstatic", filepath, all=False, verbosity=0, stdout=StringIO() + ) + with open(path, encoding="utf-8") as f: + return f.read() + + def test_all_files(self): + """ + findstatic returns all candidate files if run without --first and -v1. + """ + result = call_command( + "findstatic", "test/file.txt", verbosity=1, stdout=StringIO() + ) + lines = [line.strip() for line in result.split("\n")] + self.assertEqual( + len(lines), 3 + ) # three because there is also the "Found here" line + self.assertIn("project", lines[1]) + self.assertIn("apps", lines[2]) + + def test_all_files_less_verbose(self): + """ + findstatic returns all candidate files if run without --first and -v0. 
+ """ + result = call_command( + "findstatic", "test/file.txt", verbosity=0, stdout=StringIO() + ) + lines = [line.strip() for line in result.split("\n")] + self.assertEqual(len(lines), 2) + self.assertIn("project", lines[0]) + self.assertIn("apps", lines[1]) + + def test_all_files_more_verbose(self): + """ + findstatic returns all candidate files if run without --first and -v2. + Also, test that findstatic returns the searched locations with -v2. + """ + result = call_command( + "findstatic", "test/file.txt", verbosity=2, stdout=StringIO() + ) + lines = [line.strip() for line in result.split("\n")] + self.assertIn("project", lines[1]) + self.assertIn("apps", lines[2]) + self.assertIn("Looking in the following locations:", lines[3]) + searched_locations = ", ".join(lines[4:]) + # AppDirectoriesFinder searched locations + self.assertIn( + os.path.join("staticfiles_tests", "apps", "test", "static"), + searched_locations, + ) + self.assertIn( + os.path.join("staticfiles_tests", "apps", "no_label", "static"), + searched_locations, + ) + # FileSystemFinder searched locations + self.assertIn(TEST_SETTINGS["STATICFILES_DIRS"][1][1], searched_locations) + self.assertIn(TEST_SETTINGS["STATICFILES_DIRS"][0], searched_locations) + self.assertIn(str(TEST_SETTINGS["STATICFILES_DIRS"][2]), searched_locations) + # DefaultStorageFinder searched locations + self.assertIn( + os.path.join("staticfiles_tests", "project", "site_media", "media"), + searched_locations, + ) + + +class TestConfiguration(StaticFilesTestCase): + def test_location_empty(self): + msg = "without having set the STATIC_ROOT setting to a filesystem path" + err = StringIO() + for root in ["", None]: + with override_settings(STATIC_ROOT=root): + with self.assertRaisesMessage(ImproperlyConfigured, msg): + call_command( + "collectstatic", interactive=False, verbosity=0, stderr=err + ) + + def test_local_storage_detection_helper(self): + staticfiles_storage = storage.staticfiles_storage + try: + 
storage.staticfiles_storage._wrapped = empty + with self.settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": ( + "django.contrib.staticfiles.storage.StaticFilesStorage" + ) + } + } + ): + command = collectstatic.Command() + self.assertTrue(command.is_local_storage()) + + storage.staticfiles_storage._wrapped = empty + with self.settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.DummyStorage" + } + } + ): + command = collectstatic.Command() + self.assertFalse(command.is_local_storage()) + + collectstatic.staticfiles_storage = storage.FileSystemStorage() + command = collectstatic.Command() + self.assertTrue(command.is_local_storage()) + + collectstatic.staticfiles_storage = DummyStorage() + command = collectstatic.Command() + self.assertFalse(command.is_local_storage()) + finally: + staticfiles_storage._wrapped = empty + collectstatic.staticfiles_storage = staticfiles_storage + storage.staticfiles_storage = staticfiles_storage + + @override_settings(STATICFILES_DIRS=("test")) + def test_collectstatis_check(self): + msg = "The STATICFILES_DIRS setting is not a tuple or list." + with self.assertRaisesMessage(SystemCheckError, msg): + call_command("collectstatic", skip_checks=False) + + +class TestCollectionHelpSubcommand(AdminScriptTestCase): + @override_settings(STATIC_ROOT=None) + def test_missing_settings_dont_prevent_help(self): + """ + Even if the STATIC_ROOT setting is not set, one can still call the + `manage.py help collectstatic` command. + """ + self.write_settings("settings.py", apps=["django.contrib.staticfiles"]) + out, err = self.run_manage(["help", "collectstatic"]) + self.assertNoOutput(err) + + +class TestCollection(TestDefaults, CollectionTestCase): + """ + Test ``collectstatic`` management command. + """ + + def test_ignore(self): + """ + -i patterns are ignored. 
+ """ + self.assertFileNotFound("test/test.ignoreme") + + def test_common_ignore_patterns(self): + """ + Common ignore patterns (*~, .*, CVS) are ignored. + """ + self.assertFileNotFound("test/.hidden") + self.assertFileNotFound("test/backup~") + self.assertFileNotFound("test/CVS") + + def test_pathlib(self): + self.assertFileContains("pathlib.txt", "pathlib") + + +class TestCollectionPathLib(TestCollection): + def mkdtemp(self): + tmp_dir = super().mkdtemp() + return Path(tmp_dir) + + +class TestCollectionVerbosity(CollectionTestCase): + copying_msg = "Copying " + run_collectstatic_in_setUp = False + post_process_msg = "Post-processed" + staticfiles_copied_msg = "static files copied to" + + def test_verbosity_0(self): + stdout = StringIO() + self.run_collectstatic(verbosity=0, stdout=stdout) + self.assertEqual(stdout.getvalue(), "") + + def test_verbosity_1(self): + stdout = StringIO() + self.run_collectstatic(verbosity=1, stdout=stdout) + output = stdout.getvalue() + self.assertIn(self.staticfiles_copied_msg, output) + self.assertNotIn(self.copying_msg, output) + + def test_verbosity_2(self): + stdout = StringIO() + self.run_collectstatic(verbosity=2, stdout=stdout) + output = stdout.getvalue() + self.assertIn(self.staticfiles_copied_msg, output) + self.assertIn(self.copying_msg, output) + + @override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": ( + "django.contrib.staticfiles.storage.ManifestStaticFilesStorage" + ) + }, + } + ) + def test_verbosity_1_with_post_process(self): + stdout = StringIO() + self.run_collectstatic(verbosity=1, stdout=stdout, post_process=True) + self.assertNotIn(self.post_process_msg, stdout.getvalue()) + + @override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": ( + "django.contrib.staticfiles.storage.ManifestStaticFilesStorage" + ) + }, + } + ) + def test_verbosity_2_with_post_process(self): + stdout = StringIO() + self.run_collectstatic(verbosity=2, stdout=stdout, post_process=True) + 
self.assertIn(self.post_process_msg, stdout.getvalue()) + + +class TestCollectionClear(CollectionTestCase): + """ + Test the ``--clear`` option of the ``collectstatic`` management command. + """ + + def run_collectstatic(self, **kwargs): + clear_filepath = os.path.join(settings.STATIC_ROOT, "cleared.txt") + with open(clear_filepath, "w") as f: + f.write("should be cleared") + super().run_collectstatic(clear=True) + + def test_cleared_not_found(self): + self.assertFileNotFound("cleared.txt") + + def test_dir_not_exists(self, **kwargs): + shutil.rmtree(settings.STATIC_ROOT) + super().run_collectstatic(clear=True) + + @override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.PathNotImplementedStorage" + }, + } + ) + def test_handle_path_notimplemented(self): + self.run_collectstatic() + self.assertFileNotFound("cleared.txt") + + +class TestInteractiveMessages(CollectionTestCase): + overwrite_warning_msg = "This will overwrite existing files!" + delete_warning_msg = "This will DELETE ALL FILES in this location!" 
+ files_copied_msg = "static files copied" + + @staticmethod + def mock_input(stdout): + def _input(msg): + stdout.write(msg) + return "yes" + + return _input + + def test_warning_when_clearing_staticdir(self): + stdout = StringIO() + self.run_collectstatic() + with mock.patch("builtins.input", side_effect=self.mock_input(stdout)): + call_command("collectstatic", interactive=True, clear=True, stdout=stdout) + + output = stdout.getvalue() + self.assertNotIn(self.overwrite_warning_msg, output) + self.assertIn(self.delete_warning_msg, output) + + def test_warning_when_overwriting_files_in_staticdir(self): + stdout = StringIO() + self.run_collectstatic() + with mock.patch("builtins.input", side_effect=self.mock_input(stdout)): + call_command("collectstatic", interactive=True, stdout=stdout) + output = stdout.getvalue() + self.assertIn(self.overwrite_warning_msg, output) + self.assertNotIn(self.delete_warning_msg, output) + + def test_no_warning_when_staticdir_does_not_exist(self): + stdout = StringIO() + shutil.rmtree(settings.STATIC_ROOT) + call_command("collectstatic", interactive=True, stdout=stdout) + output = stdout.getvalue() + self.assertNotIn(self.overwrite_warning_msg, output) + self.assertNotIn(self.delete_warning_msg, output) + self.assertIn(self.files_copied_msg, output) + + def test_no_warning_for_empty_staticdir(self): + stdout = StringIO() + with tempfile.TemporaryDirectory( + prefix="collectstatic_empty_staticdir_test" + ) as static_dir: + with override_settings(STATIC_ROOT=static_dir): + call_command("collectstatic", interactive=True, stdout=stdout) + output = stdout.getvalue() + self.assertNotIn(self.overwrite_warning_msg, output) + self.assertNotIn(self.delete_warning_msg, output) + self.assertIn(self.files_copied_msg, output) + + def test_cancelled(self): + self.run_collectstatic() + with mock.patch("builtins.input", side_effect=lambda _: "no"): + with self.assertRaisesMessage( + CommandError, "Collecting static files cancelled" + ): + 
call_command("collectstatic", interactive=True) + + +class TestCollectionNoDefaultIgnore(TestDefaults, CollectionTestCase): + """ + The ``--no-default-ignore`` option of the ``collectstatic`` management + command. + """ + + def run_collectstatic(self): + super().run_collectstatic(use_default_ignore_patterns=False) + + def test_no_common_ignore_patterns(self): + """ + With --no-default-ignore, common ignore patterns (*~, .*, CVS) + are not ignored. + """ + self.assertFileContains("test/.hidden", "should be ignored") + self.assertFileContains("test/backup~", "should be ignored") + self.assertFileContains("test/CVS", "should be ignored") + + +@override_settings( + INSTALLED_APPS=[ + "staticfiles_tests.apps.staticfiles_config.IgnorePatternsAppConfig", + "staticfiles_tests.apps.test", + ] +) +class TestCollectionCustomIgnorePatterns(CollectionTestCase): + def test_custom_ignore_patterns(self): + """ + A custom ignore_patterns list, ['*.css', '*/vendor/*.js'] in this case, + can be specified in an AppConfig definition. + """ + self.assertFileNotFound("test/nonascii.css") + self.assertFileContains("test/.hidden", "should be ignored") + self.assertFileNotFound(os.path.join("test", "vendor", "module.js")) + + +class TestCollectionDryRun(TestNoFilesCreated, CollectionTestCase): + """ + Test ``--dry-run`` option for ``collectstatic`` management command. + """ + + def run_collectstatic(self): + super().run_collectstatic(dry_run=True) + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage" + }, + } +) +class TestCollectionDryRunManifestStaticFilesStorage(TestCollectionDryRun): + pass + + +class TestCollectionFilesOverride(CollectionTestCase): + """ + Test overriding duplicated files by ``collectstatic`` management command. 
+ Check for proper handling of apps order in installed apps even if file modification + dates are in different order: + 'staticfiles_test_app', + 'staticfiles_tests.apps.no_label', + """ + + def setUp(self): + self.temp_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, self.temp_dir) + + # get modification and access times for no_label/static/file2.txt + self.orig_path = os.path.join( + TEST_ROOT, "apps", "no_label", "static", "file2.txt" + ) + self.orig_mtime = os.path.getmtime(self.orig_path) + self.orig_atime = os.path.getatime(self.orig_path) + + # prepare duplicate of file2.txt from a temporary app + # this file will have modification time older than no_label/static/file2.txt + # anyway it should be taken to STATIC_ROOT because the temporary app is before + # 'no_label' app in installed apps + self.temp_app_path = os.path.join(self.temp_dir, "staticfiles_test_app") + self.testfile_path = os.path.join(self.temp_app_path, "static", "file2.txt") + + os.makedirs(self.temp_app_path) + with open(os.path.join(self.temp_app_path, "__init__.py"), "w+"): + pass + + os.makedirs(os.path.dirname(self.testfile_path)) + with open(self.testfile_path, "w+") as f: + f.write("duplicate of file2.txt") + + os.utime(self.testfile_path, (self.orig_atime - 1, self.orig_mtime - 1)) + + self.settings_with_test_app = self.modify_settings( + INSTALLED_APPS={"prepend": "staticfiles_test_app"}, + ) + with extend_sys_path(self.temp_dir): + self.settings_with_test_app.enable() + + super().setUp() + + def tearDown(self): + super().tearDown() + self.settings_with_test_app.disable() + + def test_ordering_override(self): + """ + Test if collectstatic takes files in proper order + """ + self.assertFileContains("file2.txt", "duplicate of file2.txt") + + # run collectstatic again + self.run_collectstatic() + + self.assertFileContains("file2.txt", "duplicate of file2.txt") + + +# The collectstatic test suite already has conflicting files since both +# project/test/file.txt and 
apps/test/static/test/file.txt are collected. To +# properly test for the warning not happening unless we tell it to explicitly, +# we remove the project directory and will add back a conflicting file later. +@override_settings(STATICFILES_DIRS=[]) +class TestCollectionOverwriteWarning(CollectionTestCase): + """ + Test warning in ``collectstatic`` output when a file is skipped because a + previous file was already written to the same path. + """ + + # If this string is in the collectstatic output, it means the warning we're + # looking for was emitted. + warning_string = "Found another file" + + def _collectstatic_output(self, **kwargs): + """ + Run collectstatic, and capture and return the output. We want to run + the command at highest verbosity, which is why we can't + just call e.g. BaseCollectionTestCase.run_collectstatic() + """ + out = StringIO() + call_command( + "collectstatic", interactive=False, verbosity=3, stdout=out, **kwargs + ) + return out.getvalue() + + def test_no_warning(self): + """ + There isn't a warning if there isn't a duplicate destination. + """ + output = self._collectstatic_output(clear=True) + self.assertNotIn(self.warning_string, output) + + def test_warning(self): + """ + There is a warning when there are duplicate destinations. + """ + with tempfile.TemporaryDirectory() as static_dir: + duplicate = os.path.join(static_dir, "test", "file.txt") + os.mkdir(os.path.dirname(duplicate)) + with open(duplicate, "w+") as f: + f.write("duplicate of file.txt") + + with self.settings(STATICFILES_DIRS=[static_dir]): + output = self._collectstatic_output(clear=True) + self.assertIn(self.warning_string, output) + + os.remove(duplicate) + + # Make sure the warning went away again. 
+ with self.settings(STATICFILES_DIRS=[static_dir]): + output = self._collectstatic_output(clear=True) + self.assertNotIn(self.warning_string, output) + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.DummyStorage" + }, + } +) +class TestCollectionNonLocalStorage(TestNoFilesCreated, CollectionTestCase): + """ + Tests for a Storage that implements get_modified_time() but not path() + (#15035). + """ + + def test_storage_properties(self): + # Properties of the Storage as described in the ticket. + storage = DummyStorage() + self.assertEqual( + storage.get_modified_time("name"), + datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), + ) + with self.assertRaisesMessage( + NotImplementedError, "This backend doesn't support absolute paths." + ): + storage.path("name") + + +class TestCollectionNeverCopyStorage(CollectionTestCase): + @override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.NeverCopyRemoteStorage" + }, + } + ) + def test_skips_newer_files_in_remote_storage(self): + """ + collectstatic skips newer files in a remote storage. + run_collectstatic() in setUp() copies the static files, then files are + always skipped after NeverCopyRemoteStorage is activated since + NeverCopyRemoteStorage.get_modified_time() returns a datetime in the + future to simulate an unmodified file. + """ + stdout = StringIO() + self.run_collectstatic(stdout=stdout, verbosity=2) + output = stdout.getvalue() + self.assertIn("Skipping 'test.txt' (not modified)", output) + + +@unittest.skipUnless(symlinks_supported(), "Must be able to symlink to run this test.") +class TestCollectionLinks(TestDefaults, CollectionTestCase): + """ + Test ``--link`` option for ``collectstatic`` management command. + + Note that by inheriting ``TestDefaults`` we repeat all + the standard file resolving tests here, to make sure using + ``--link`` does not change the file-selection semantics. 
+ """ + + def run_collectstatic(self, clear=False, link=True, **kwargs): + super().run_collectstatic(link=link, clear=clear, **kwargs) + + def test_links_created(self): + """ + With ``--link``, symbolic links are created. + """ + self.assertTrue(os.path.islink(os.path.join(settings.STATIC_ROOT, "test.txt"))) + + def test_broken_symlink(self): + """ + Test broken symlink gets deleted. + """ + path = os.path.join(settings.STATIC_ROOT, "test.txt") + os.unlink(path) + self.run_collectstatic() + self.assertTrue(os.path.islink(path)) + + def test_symlinks_and_files_replaced(self): + """ + Running collectstatic in non-symlink mode replaces symlinks with files, + while symlink mode replaces files with symlinks. + """ + path = os.path.join(settings.STATIC_ROOT, "test.txt") + self.assertTrue(os.path.islink(path)) + self.run_collectstatic(link=False) + self.assertFalse(os.path.islink(path)) + self.run_collectstatic(link=True) + self.assertTrue(os.path.islink(path)) + + def test_clear_broken_symlink(self): + """ + With ``--clear``, broken symbolic links are deleted. + """ + nonexistent_file_path = os.path.join(settings.STATIC_ROOT, "nonexistent.txt") + broken_symlink_path = os.path.join(settings.STATIC_ROOT, "symlink.txt") + os.symlink(nonexistent_file_path, broken_symlink_path) + self.run_collectstatic(clear=True) + self.assertFalse(os.path.lexists(broken_symlink_path)) + + @override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.PathNotImplementedStorage" + } + } + ) + def test_no_remote_link(self): + with self.assertRaisesMessage( + CommandError, "Can't symlink to a remote destination." 
+ ): + self.run_collectstatic() diff --git a/testbed/django__django/tests/staticfiles_tests/test_storage.py b/testbed/django__django/tests/staticfiles_tests/test_storage.py new file mode 100644 index 0000000000000000000000000000000000000000..dc8607a307f9fa582185e4135651985148e808e3 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_storage.py @@ -0,0 +1,894 @@ +import json +import os +import shutil +import sys +import tempfile +import unittest +from io import StringIO +from pathlib import Path +from unittest import mock + +from django.conf import STATICFILES_STORAGE_ALIAS, settings +from django.contrib.staticfiles import finders, storage +from django.contrib.staticfiles.management.commands.collectstatic import ( + Command as CollectstaticCommand, +) +from django.core.management import call_command +from django.test import SimpleTestCase, override_settings + +from .cases import CollectionTestCase +from .settings import TEST_ROOT + + +def hashed_file_path(test, path): + fullpath = test.render_template(test.static_template_snippet(path)) + return fullpath.replace(settings.STATIC_URL, "") + + +class TestHashedFiles: + hashed_file_path = hashed_file_path + + def tearDown(self): + # Clear hashed files to avoid side effects among tests. + storage.staticfiles_storage.hashed_files.clear() + + def assertPostCondition(self): + """ + Assert post conditions for a test are met. Must be manually called at + the end of each test. 
+ """ + pass + + def test_template_tag_return(self): + self.assertStaticRaises( + ValueError, "does/not/exist.png", "/static/does/not/exist.png" + ) + self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt") + self.assertStaticRenders( + "test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True + ) + self.assertStaticRenders( + "cached/styles.css", "/static/cached/styles.5e0040571e1a.css" + ) + self.assertStaticRenders("path/", "/static/path/") + self.assertStaticRenders("path/?query", "/static/path/?query") + self.assertPostCondition() + + def test_template_tag_simple_content(self): + relpath = self.hashed_file_path("cached/styles.css") + self.assertEqual(relpath, "cached/styles.5e0040571e1a.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"cached/other.css", content) + self.assertIn(b"other.d41d8cd98f00.css", content) + self.assertPostCondition() + + def test_path_ignored_completely(self): + relpath = self.hashed_file_path("cached/css/ignored.css") + self.assertEqual(relpath, "cached/css/ignored.55e7c226dda1.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertIn(b"#foobar", content) + self.assertIn(b"http:foobar", content) + self.assertIn(b"https:foobar", content) + self.assertIn(b"data:foobar", content) + self.assertIn(b"chrome:foobar", content) + self.assertIn(b"//foobar", content) + self.assertIn(b"url()", content) + self.assertPostCondition() + + def test_path_with_querystring(self): + relpath = self.hashed_file_path("cached/styles.css?spam=eggs") + self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs") + with storage.staticfiles_storage.open( + "cached/styles.5e0040571e1a.css" + ) as relfile: + content = relfile.read() + self.assertNotIn(b"cached/other.css", content) + self.assertIn(b"other.d41d8cd98f00.css", content) + self.assertPostCondition() + + def test_path_with_fragment(self): + relpath 
= self.hashed_file_path("cached/styles.css#eggs") + self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs") + with storage.staticfiles_storage.open( + "cached/styles.5e0040571e1a.css" + ) as relfile: + content = relfile.read() + self.assertNotIn(b"cached/other.css", content) + self.assertIn(b"other.d41d8cd98f00.css", content) + self.assertPostCondition() + + def test_path_with_querystring_and_fragment(self): + relpath = self.hashed_file_path("cached/css/fragments.css") + self.assertEqual(relpath, "cached/css/fragments.a60c0e74834f.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertIn(b"fonts/font.b9b105392eb8.eot?#iefix", content) + self.assertIn(b"fonts/font.b8d603e42714.svg#webfontIyfZbseF", content) + self.assertIn( + b"fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg", content + ) + self.assertIn( + b"data:font/woff;charset=utf-8;" + b"base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA", + content, + ) + self.assertIn(b"#default#VML", content) + self.assertPostCondition() + + def test_template_tag_absolute(self): + relpath = self.hashed_file_path("cached/absolute.css") + self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"/static/cached/styles.css", content) + self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content) + self.assertNotIn(b"/static/styles_root.css", content) + self.assertIn(b"/static/styles_root.401f2509a628.css", content) + self.assertIn(b"/static/cached/img/relative.acae32e4532b.png", content) + self.assertPostCondition() + + def test_template_tag_absolute_root(self): + """ + Like test_template_tag_absolute, but for a file in STATIC_ROOT (#26249). 
+ """ + relpath = self.hashed_file_path("absolute_root.css") + self.assertEqual(relpath, "absolute_root.f821df1b64f7.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"/static/styles_root.css", content) + self.assertIn(b"/static/styles_root.401f2509a628.css", content) + self.assertPostCondition() + + def test_template_tag_relative(self): + relpath = self.hashed_file_path("cached/relative.css") + self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"../cached/styles.css", content) + self.assertNotIn(b'@import "styles.css"', content) + self.assertNotIn(b"url(img/relative.png)", content) + self.assertIn(b'url("img/relative.acae32e4532b.png")', content) + self.assertIn(b"../cached/styles.5e0040571e1a.css", content) + self.assertPostCondition() + + def test_import_replacement(self): + "See #18050" + relpath = self.hashed_file_path("cached/import.css") + self.assertEqual(relpath, "cached/import.f53576679e5a.css") + with storage.staticfiles_storage.open(relpath) as relfile: + self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read()) + self.assertPostCondition() + + def test_template_tag_deep_relative(self): + relpath = self.hashed_file_path("cached/css/window.css") + self.assertEqual(relpath, "cached/css/window.5d5c10836967.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"url(img/window.png)", content) + self.assertIn(b'url("img/window.acae32e4532b.png")', content) + self.assertPostCondition() + + def test_template_tag_url(self): + relpath = self.hashed_file_path("cached/url.css") + self.assertEqual(relpath, "cached/url.902310b73412.css") + with storage.staticfiles_storage.open(relpath) as relfile: + self.assertIn(b"https://", relfile.read()) + self.assertPostCondition() + + @override_settings( + 
STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "loop")], + STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"], + ) + def test_import_loop(self): + finders.get_finder.cache_clear() + err = StringIO() + with self.assertRaisesMessage(RuntimeError, "Max post-process passes exceeded"): + call_command("collectstatic", interactive=False, verbosity=0, stderr=err) + self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue()) + self.assertPostCondition() + + def test_post_processing(self): + """ + post_processing behaves correctly. + + Files that are alterable should always be post-processed; files that + aren't should be skipped. + + collectstatic has already been called once in setUp() for this testcase, + therefore we check by verifying behavior on a second run. + """ + collectstatic_args = { + "interactive": False, + "verbosity": 0, + "link": False, + "clear": False, + "dry_run": False, + "post_process": True, + "use_default_ignore_patterns": True, + "ignore_patterns": ["*.ignoreme"], + } + + collectstatic_cmd = CollectstaticCommand() + collectstatic_cmd.set_options(**collectstatic_args) + stats = collectstatic_cmd.collect() + self.assertIn( + os.path.join("cached", "css", "window.css"), stats["post_processed"] + ) + self.assertIn( + os.path.join("cached", "css", "img", "window.png"), stats["unmodified"] + ) + self.assertIn(os.path.join("test", "nonascii.css"), stats["post_processed"]) + # No file should be yielded twice. 
+ self.assertCountEqual(stats["post_processed"], set(stats["post_processed"])) + self.assertPostCondition() + + def test_css_import_case_insensitive(self): + relpath = self.hashed_file_path("cached/styles_insensitive.css") + self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"cached/other.css", content) + self.assertIn(b"other.d41d8cd98f00.css", content) + self.assertPostCondition() + + def test_css_source_map(self): + relpath = self.hashed_file_path("cached/source_map.css") + self.assertEqual(relpath, "cached/source_map.b2fceaf426aa.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"/*# sourceMappingURL=source_map.css.map*/", content) + self.assertIn( + b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */", + content, + ) + self.assertPostCondition() + + def test_css_source_map_tabs(self): + relpath = self.hashed_file_path("cached/source_map_tabs.css") + self.assertEqual(relpath, "cached/source_map_tabs.b2fceaf426aa.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"/*#\tsourceMappingURL=source_map.css.map\t*/", content) + self.assertIn( + b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */", + content, + ) + self.assertPostCondition() + + def test_css_source_map_sensitive(self): + relpath = self.hashed_file_path("cached/source_map_sensitive.css") + self.assertEqual(relpath, "cached/source_map_sensitive.456683f2106f.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertIn(b"/*# sOuRcEMaPpInGURL=source_map.css.map */", content) + self.assertNotIn( + b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */", + content, + ) + self.assertPostCondition() + + def test_css_source_map_data_uri(self): + relpath = 
self.hashed_file_path("cached/source_map_data_uri.css") + self.assertEqual(relpath, "cached/source_map_data_uri.3166be10260d.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + source_map_data_uri = ( + b"/*# sourceMappingURL=data:application/json;charset=utf8;base64," + b"eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIl9zcmMv*/" + ) + self.assertIn(source_map_data_uri, content) + self.assertPostCondition() + + def test_js_source_map(self): + relpath = self.hashed_file_path("cached/source_map.js") + self.assertEqual(relpath, "cached/source_map.cd45b8534a87.js") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"//# sourceMappingURL=source_map.js.map", content) + self.assertIn( + b"//# sourceMappingURL=source_map.js.99914b932bd3.map", + content, + ) + self.assertPostCondition() + + def test_js_source_map_trailing_whitespace(self): + relpath = self.hashed_file_path("cached/source_map_trailing_whitespace.js") + self.assertEqual( + relpath, "cached/source_map_trailing_whitespace.cd45b8534a87.js" + ) + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"//# sourceMappingURL=source_map.js.map\t ", content) + self.assertIn( + b"//# sourceMappingURL=source_map.js.99914b932bd3.map", + content, + ) + self.assertPostCondition() + + def test_js_source_map_sensitive(self): + relpath = self.hashed_file_path("cached/source_map_sensitive.js") + self.assertEqual(relpath, "cached/source_map_sensitive.5da96fdd3cb3.js") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertIn(b"//# sOuRcEMaPpInGURL=source_map.js.map", content) + self.assertNotIn( + b"//# sourceMappingURL=source_map.js.99914b932bd3.map", + content, + ) + self.assertPostCondition() + + def test_js_source_map_data_uri(self): + relpath = self.hashed_file_path("cached/source_map_data_uri.js") + self.assertEqual(relpath, 
"cached/source_map_data_uri.a68d23cbf6dd.js") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + source_map_data_uri = ( + b"//# sourceMappingURL=data:application/json;charset=utf8;base64," + b"eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIl9zcmMv" + ) + self.assertIn(source_map_data_uri, content) + self.assertPostCondition() + + @override_settings( + STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "faulty")], + STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"], + ) + def test_post_processing_failure(self): + """ + post_processing indicates the origin of the error when it fails. + """ + finders.get_finder.cache_clear() + err = StringIO() + with self.assertRaises(Exception): + call_command("collectstatic", interactive=False, verbosity=0, stderr=err) + self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue()) + self.assertPostCondition() + + @override_settings( + STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "nonutf8")], + STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"], + ) + def test_post_processing_nonutf8(self): + finders.get_finder.cache_clear() + err = StringIO() + with self.assertRaises(UnicodeDecodeError): + call_command("collectstatic", interactive=False, verbosity=0, stderr=err) + self.assertEqual("Post-processing 'nonutf8.css' failed!\n\n", err.getvalue()) + self.assertPostCondition() + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.ExtraPatternsStorage", + }, + } +) +class TestExtraPatternsStorage(CollectionTestCase): + def setUp(self): + storage.staticfiles_storage.hashed_files.clear() # avoid cache interference + super().setUp() + + def cached_file_path(self, path): + fullpath = self.render_template(self.static_template_snippet(path)) + return fullpath.replace(settings.STATIC_URL, "") + + def test_multi_extension_patterns(self): + """ + With storage classes having several file 
extension patterns, only the + files matching a specific file pattern should be affected by the + substitution (#19670). + """ + # CSS files shouldn't be touched by JS patterns. + relpath = self.cached_file_path("cached/import.css") + self.assertEqual(relpath, "cached/import.f53576679e5a.css") + with storage.staticfiles_storage.open(relpath) as relfile: + self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read()) + + # Confirm JS patterns have been applied to JS files. + relpath = self.cached_file_path("cached/test.js") + self.assertEqual(relpath, "cached/test.388d7a790d46.js") + with storage.staticfiles_storage.open(relpath) as relfile: + self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read()) + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage", + }, + } +) +class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): + """ + Tests for the Cache busting storage + """ + + def setUp(self): + super().setUp() + + temp_dir = tempfile.mkdtemp() + os.makedirs(os.path.join(temp_dir, "test")) + self._clear_filename = os.path.join(temp_dir, "test", "cleared.txt") + with open(self._clear_filename, "w") as f: + f.write("to be deleted in one test") + + self.patched_settings = self.settings( + STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir], + ) + self.patched_settings.enable() + self.addCleanup(shutil.rmtree, temp_dir) + self._manifest_strict = storage.staticfiles_storage.manifest_strict + + def tearDown(self): + self.patched_settings.disable() + + if os.path.exists(self._clear_filename): + os.unlink(self._clear_filename) + + storage.staticfiles_storage.manifest_strict = self._manifest_strict + super().tearDown() + + def assertPostCondition(self): + hashed_files = storage.staticfiles_storage.hashed_files + # The in-memory version of the manifest matches the one on disk + # since a properly created manifest should cover all filenames. 
+ if hashed_files: + manifest, _ = storage.staticfiles_storage.load_manifest() + self.assertEqual(hashed_files, manifest) + + def test_manifest_exists(self): + filename = storage.staticfiles_storage.manifest_name + path = storage.staticfiles_storage.path(filename) + self.assertTrue(os.path.exists(path)) + + def test_manifest_does_not_exist(self): + storage.staticfiles_storage.manifest_name = "does.not.exist.json" + self.assertIsNone(storage.staticfiles_storage.read_manifest()) + + def test_manifest_does_not_ignore_permission_error(self): + with mock.patch("builtins.open", side_effect=PermissionError): + with self.assertRaises(PermissionError): + storage.staticfiles_storage.read_manifest() + + def test_loaded_cache(self): + self.assertNotEqual(storage.staticfiles_storage.hashed_files, {}) + manifest_content = storage.staticfiles_storage.read_manifest() + self.assertIn( + '"version": "%s"' % storage.staticfiles_storage.manifest_version, + manifest_content, + ) + + def test_parse_cache(self): + hashed_files = storage.staticfiles_storage.hashed_files + manifest, _ = storage.staticfiles_storage.load_manifest() + self.assertEqual(hashed_files, manifest) + + def test_clear_empties_manifest(self): + cleared_file_name = storage.staticfiles_storage.clean_name( + os.path.join("test", "cleared.txt") + ) + # collect the additional file + self.run_collectstatic() + + hashed_files = storage.staticfiles_storage.hashed_files + self.assertIn(cleared_file_name, hashed_files) + + manifest_content, _ = storage.staticfiles_storage.load_manifest() + self.assertIn(cleared_file_name, manifest_content) + + original_path = storage.staticfiles_storage.path(cleared_file_name) + self.assertTrue(os.path.exists(original_path)) + + # delete the original file form the app, collect with clear + os.unlink(self._clear_filename) + self.run_collectstatic(clear=True) + + self.assertFileNotFound(original_path) + + hashed_files = storage.staticfiles_storage.hashed_files + 
self.assertNotIn(cleared_file_name, hashed_files) + + manifest_content, _ = storage.staticfiles_storage.load_manifest() + self.assertNotIn(cleared_file_name, manifest_content) + + def test_missing_entry(self): + missing_file_name = "cached/missing.css" + configured_storage = storage.staticfiles_storage + self.assertNotIn(missing_file_name, configured_storage.hashed_files) + + # File name not found in manifest + with self.assertRaisesMessage( + ValueError, + "Missing staticfiles manifest entry for '%s'" % missing_file_name, + ): + self.hashed_file_path(missing_file_name) + + configured_storage.manifest_strict = False + # File doesn't exist on disk + err_msg = "The file '%s' could not be found with %r." % ( + missing_file_name, + configured_storage._wrapped, + ) + with self.assertRaisesMessage(ValueError, err_msg): + self.hashed_file_path(missing_file_name) + + content = StringIO() + content.write("Found") + configured_storage.save(missing_file_name, content) + # File exists on disk + self.hashed_file_path(missing_file_name) + + def test_intermediate_files(self): + cached_files = os.listdir(os.path.join(settings.STATIC_ROOT, "cached")) + # Intermediate files shouldn't be created for reference. + self.assertEqual( + len( + [ + cached_file + for cached_file in cached_files + if cached_file.startswith("relative.") + ] + ), + 2, + ) + + def test_manifest_hash(self): + # Collect the additional file. + self.run_collectstatic() + + _, manifest_hash_orig = storage.staticfiles_storage.load_manifest() + self.assertNotEqual(manifest_hash_orig, "") + self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig) + # Saving doesn't change the hash. + storage.staticfiles_storage.save_manifest() + self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig) + # Delete the original file from the app, collect with clear. + os.unlink(self._clear_filename) + self.run_collectstatic(clear=True) + # Hash is changed. 
+ _, manifest_hash = storage.staticfiles_storage.load_manifest() + self.assertNotEqual(manifest_hash, manifest_hash_orig) + + def test_manifest_hash_v1(self): + storage.staticfiles_storage.manifest_name = "staticfiles_v1.json" + manifest_content, manifest_hash = storage.staticfiles_storage.load_manifest() + self.assertEqual(manifest_hash, "") + self.assertEqual(manifest_content, {"dummy.txt": "dummy.txt"}) + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.NoneHashStorage", + }, + } +) +class TestCollectionNoneHashStorage(CollectionTestCase): + hashed_file_path = hashed_file_path + + def test_hashed_name(self): + relpath = self.hashed_file_path("cached/styles.css") + self.assertEqual(relpath, "cached/styles.css") + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.NoPostProcessReplacedPathStorage", + }, + } +) +class TestCollectionNoPostProcessReplacedPaths(CollectionTestCase): + run_collectstatic_in_setUp = False + + def test_collectstatistic_no_post_process_replaced_paths(self): + stdout = StringIO() + self.run_collectstatic(verbosity=1, stdout=stdout) + self.assertIn("post-processed", stdout.getvalue()) + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.SimpleStorage", + }, + } +) +class TestCollectionSimpleStorage(CollectionTestCase): + hashed_file_path = hashed_file_path + + def setUp(self): + storage.staticfiles_storage.hashed_files.clear() # avoid cache interference + super().setUp() + + def test_template_tag_return(self): + self.assertStaticRaises( + ValueError, "does/not/exist.png", "/static/does/not/exist.png" + ) + self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt") + self.assertStaticRenders( + "cached/styles.css", "/static/cached/styles.deploy12345.css" + ) + self.assertStaticRenders("path/", "/static/path/") + self.assertStaticRenders("path/?query", 
"/static/path/?query") + + def test_template_tag_simple_content(self): + relpath = self.hashed_file_path("cached/styles.css") + self.assertEqual(relpath, "cached/styles.deploy12345.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertNotIn(b"cached/other.css", content) + self.assertIn(b"other.deploy12345.css", content) + + +class JSModuleImportAggregationManifestStorage(storage.ManifestStaticFilesStorage): + support_js_module_import_aggregation = True + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": ( + "staticfiles_tests.test_storage." + "JSModuleImportAggregationManifestStorage" + ), + }, + } +) +class TestCollectionJSModuleImportAggregationManifestStorage(CollectionTestCase): + hashed_file_path = hashed_file_path + + def test_module_import(self): + relpath = self.hashed_file_path("cached/module.js") + self.assertEqual(relpath, "cached/module.55fd6938fbc5.js") + tests = [ + # Relative imports. + b'import testConst from "./module_test.477bbebe77f0.js";', + b'import relativeModule from "../nested/js/nested.866475c46bb4.js";', + b'import { firstConst, secondConst } from "./module_test.477bbebe77f0.js";', + # Absolute import. + b'import rootConst from "/static/absolute_root.5586327fe78c.js";', + # Dynamic import. + b'const dynamicModule = import("./module_test.477bbebe77f0.js");', + # Creating a module object. + b'import * as NewModule from "./module_test.477bbebe77f0.js";', + # Aliases. 
+ b'import { testConst as alias } from "./module_test.477bbebe77f0.js";', + b"import {\n" + b" firstVar1 as firstVarAlias,\n" + b" $second_var_2 as secondVarAlias\n" + b'} from "./module_test.477bbebe77f0.js";', + ] + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + for module_import in tests: + with self.subTest(module_import=module_import): + self.assertIn(module_import, content) + + def test_aggregating_modules(self): + relpath = self.hashed_file_path("cached/module.js") + self.assertEqual(relpath, "cached/module.55fd6938fbc5.js") + tests = [ + b'export * from "./module_test.477bbebe77f0.js";', + b'export { testConst } from "./module_test.477bbebe77f0.js";', + b"export {\n" + b" firstVar as firstVarAlias,\n" + b" secondVar as secondVarAlias\n" + b'} from "./module_test.477bbebe77f0.js";', + ] + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + for module_import in tests: + with self.subTest(module_import=module_import): + self.assertIn(module_import, content) + + +class CustomManifestStorage(storage.ManifestStaticFilesStorage): + def __init__(self, *args, manifest_storage=None, **kwargs): + manifest_storage = storage.StaticFilesStorage( + location=kwargs.pop("manifest_location"), + ) + super().__init__(*args, manifest_storage=manifest_storage, **kwargs) + + +class TestCustomManifestStorage(SimpleTestCase): + def setUp(self): + self.manifest_path = Path(tempfile.mkdtemp()) + self.addCleanup(shutil.rmtree, self.manifest_path) + + self.staticfiles_storage = CustomManifestStorage( + manifest_location=self.manifest_path, + ) + self.manifest_file = self.manifest_path / self.staticfiles_storage.manifest_name + # Manifest without paths. 
+ self.manifest = {"version": self.staticfiles_storage.manifest_version} + with self.manifest_file.open("w") as manifest_file: + json.dump(self.manifest, manifest_file) + + def test_read_manifest(self): + self.assertEqual( + self.staticfiles_storage.read_manifest(), + json.dumps(self.manifest), + ) + + def test_read_manifest_nonexistent(self): + os.remove(self.manifest_file) + self.assertIsNone(self.staticfiles_storage.read_manifest()) + + def test_save_manifest_override(self): + self.assertIs(self.manifest_file.exists(), True) + self.staticfiles_storage.save_manifest() + self.assertIs(self.manifest_file.exists(), True) + new_manifest = json.loads(self.staticfiles_storage.read_manifest()) + self.assertIn("paths", new_manifest) + self.assertNotEqual(new_manifest, self.manifest) + + def test_save_manifest_create(self): + os.remove(self.manifest_file) + self.staticfiles_storage.save_manifest() + self.assertIs(self.manifest_file.exists(), True) + new_manifest = json.loads(self.staticfiles_storage.read_manifest()) + self.assertIn("paths", new_manifest) + self.assertNotEqual(new_manifest, self.manifest) + + +class CustomStaticFilesStorage(storage.StaticFilesStorage): + """ + Used in TestStaticFilePermissions + """ + + def __init__(self, *args, **kwargs): + kwargs["file_permissions_mode"] = 0o640 + kwargs["directory_permissions_mode"] = 0o740 + super().__init__(*args, **kwargs) + + +@unittest.skipIf(sys.platform == "win32", "Windows only partially supports chmod.") +class TestStaticFilePermissions(CollectionTestCase): + command_params = { + "interactive": False, + "verbosity": 0, + "ignore_patterns": ["*.ignoreme"], + } + + def setUp(self): + self.umask = 0o027 + self.old_umask = os.umask(self.umask) + super().setUp() + + def tearDown(self): + os.umask(self.old_umask) + super().tearDown() + + # Don't run collectstatic command in this test class. 
+ def run_collectstatic(self, **kwargs): + pass + + @override_settings( + FILE_UPLOAD_PERMISSIONS=0o655, + FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765, + ) + def test_collect_static_files_permissions(self): + call_command("collectstatic", **self.command_params) + static_root = Path(settings.STATIC_ROOT) + test_file = static_root / "test.txt" + file_mode = test_file.stat().st_mode & 0o777 + self.assertEqual(file_mode, 0o655) + tests = [ + static_root / "subdir", + static_root / "nested", + static_root / "nested" / "css", + ] + for directory in tests: + with self.subTest(directory=directory): + dir_mode = directory.stat().st_mode & 0o777 + self.assertEqual(dir_mode, 0o765) + + @override_settings( + FILE_UPLOAD_PERMISSIONS=None, + FILE_UPLOAD_DIRECTORY_PERMISSIONS=None, + ) + def test_collect_static_files_default_permissions(self): + call_command("collectstatic", **self.command_params) + static_root = Path(settings.STATIC_ROOT) + test_file = static_root / "test.txt" + file_mode = test_file.stat().st_mode & 0o777 + self.assertEqual(file_mode, 0o666 & ~self.umask) + tests = [ + static_root / "subdir", + static_root / "nested", + static_root / "nested" / "css", + ] + for directory in tests: + with self.subTest(directory=directory): + dir_mode = directory.stat().st_mode & 0o777 + self.assertEqual(dir_mode, 0o777 & ~self.umask) + + @override_settings( + FILE_UPLOAD_PERMISSIONS=0o655, + FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765, + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.test_storage.CustomStaticFilesStorage", + }, + }, + ) + def test_collect_static_files_subclass_of_static_storage(self): + call_command("collectstatic", **self.command_params) + static_root = Path(settings.STATIC_ROOT) + test_file = static_root / "test.txt" + file_mode = test_file.stat().st_mode & 0o777 + self.assertEqual(file_mode, 0o640) + tests = [ + static_root / "subdir", + static_root / "nested", + static_root / "nested" / "css", + ] + for directory in tests: + with 
self.subTest(directory=directory): + dir_mode = directory.stat().st_mode & 0o777 + self.assertEqual(dir_mode, 0o740) + + +@override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage", + }, + } +) +class TestCollectionHashedFilesCache(CollectionTestCase): + """ + Files referenced from CSS use the correct final hashed name regardless of + the order in which the files are post-processed. + """ + + hashed_file_path = hashed_file_path + + def setUp(self): + super().setUp() + self._temp_dir = temp_dir = tempfile.mkdtemp() + os.makedirs(os.path.join(temp_dir, "test")) + self.addCleanup(shutil.rmtree, temp_dir) + + def _get_filename_path(self, filename): + return os.path.join(self._temp_dir, "test", filename) + + def test_file_change_after_collectstatic(self): + # Create initial static files. + file_contents = ( + ("foo.png", "foo"), + ("bar.css", 'url("foo.png")\nurl("xyz.png")'), + ("xyz.png", "xyz"), + ) + for filename, content in file_contents: + with open(self._get_filename_path(filename), "w") as f: + f.write(content) + + with self.modify_settings(STATICFILES_DIRS={"append": self._temp_dir}): + finders.get_finder.cache_clear() + err = StringIO() + # First collectstatic run. + call_command("collectstatic", interactive=False, verbosity=0, stderr=err) + relpath = self.hashed_file_path("test/bar.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertIn(b"foo.acbd18db4cc2.png", content) + self.assertIn(b"xyz.d16fb36f0911.png", content) + + # Change the contents of the png files. + for filename in ("foo.png", "xyz.png"): + with open(self._get_filename_path(filename), "w+b") as f: + f.write(b"new content of file to change its hash") + + # The hashes of the png files in the CSS file are updated after + # a second collectstatic. 
+ call_command("collectstatic", interactive=False, verbosity=0, stderr=err) + relpath = self.hashed_file_path("test/bar.css") + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + self.assertIn(b"foo.57a5cb9ba68d.png", content) + self.assertIn(b"xyz.57a5cb9ba68d.png", content) diff --git a/testbed/django__django/tests/staticfiles_tests/test_templatetags.py b/testbed/django__django/tests/staticfiles_tests/test_templatetags.py new file mode 100644 index 0000000000000000000000000000000000000000..9b8bcf1938dfd78f45efba8faac059309378debc --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_templatetags.py @@ -0,0 +1,29 @@ +from django.conf import STATICFILES_STORAGE_ALIAS +from django.test import override_settings + +from .cases import StaticFilesTestCase + + +class TestTemplateTag(StaticFilesTestCase): + def test_template_tag(self): + self.assertStaticRenders("does/not/exist.png", "/static/does/not/exist.png") + self.assertStaticRenders("testfile.txt", "/static/testfile.txt") + self.assertStaticRenders( + "special?chars&quoted.html", "/static/special%3Fchars%26quoted.html" + ) + + @override_settings( + STORAGES={ + STATICFILES_STORAGE_ALIAS: { + "BACKEND": "staticfiles_tests.storage.QueryStringStorage" + }, + } + ) + def test_template_tag_escapes(self): + """ + Storage.url() should return an encoded path and might be overridden + to also include a querystring. {% static %} escapes the URL to avoid + raw '&', for example. 
+ """ + self.assertStaticRenders("a.html", "a.html?a=b&c=d") + self.assertStaticRenders("a.html", "a.html?a=b&c=d", autoescape=False) diff --git a/testbed/django__django/tests/staticfiles_tests/test_utils.py b/testbed/django__django/tests/staticfiles_tests/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5acd406b68d9a8321d1c05603abecb4e90a3eb87 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_utils.py @@ -0,0 +1,13 @@ +from django.contrib.staticfiles.utils import check_settings +from django.core.exceptions import ImproperlyConfigured +from django.test import SimpleTestCase, override_settings + + +class CheckSettingsTests(SimpleTestCase): + @override_settings(DEBUG=True, MEDIA_URL="static/media/", STATIC_URL="static/") + def test_media_url_in_static_url(self): + msg = "runserver can't serve media if MEDIA_URL is within STATIC_URL." + with self.assertRaisesMessage(ImproperlyConfigured, msg): + check_settings() + with self.settings(DEBUG=False): # Check disabled if DEBUG=False. + check_settings() diff --git a/testbed/django__django/tests/staticfiles_tests/test_views.py b/testbed/django__django/tests/staticfiles_tests/test_views.py new file mode 100644 index 0000000000000000000000000000000000000000..44c0cfe17f2348050c3990443a0a08f6f47636a6 --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/test_views.py @@ -0,0 +1,47 @@ +import posixpath +from urllib.parse import quote + +from django.conf import settings +from django.test import override_settings + +from .cases import StaticFilesTestCase, TestDefaults + + +@override_settings(ROOT_URLCONF="staticfiles_tests.urls.default") +class TestServeStatic(StaticFilesTestCase): + """ + Test static asset serving view. 
+ """ + + def _response(self, filepath): + return self.client.get(quote(posixpath.join(settings.STATIC_URL, filepath))) + + def assertFileContains(self, filepath, text): + self.assertContains(self._response(filepath), text) + + def assertFileNotFound(self, filepath): + self.assertEqual(self._response(filepath).status_code, 404) + + +@override_settings(DEBUG=False) +class TestServeDisabled(TestServeStatic): + """ + Test serving static files disabled when DEBUG is False. + """ + + def test_disabled_serving(self): + self.assertFileNotFound("test.txt") + + +@override_settings(DEBUG=True) +class TestServeStaticWithDefaultURL(TestDefaults, TestServeStatic): + """ + Test static asset serving view with manually configured URLconf. + """ + + +@override_settings(DEBUG=True, ROOT_URLCONF="staticfiles_tests.urls.helper") +class TestServeStaticWithURLHelper(TestDefaults, TestServeStatic): + """ + Test static asset serving view with staticfiles_urlpatterns helper. + """ diff --git a/testbed/django__django/tests/staticfiles_tests/urls/__init__.py b/testbed/django__django/tests/staticfiles_tests/urls/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/staticfiles_tests/urls/default.py b/testbed/django__django/tests/staticfiles_tests/urls/default.py new file mode 100644 index 0000000000000000000000000000000000000000..a7345cf4557a3b9d78dac97319b75881328be1cf --- /dev/null +++ b/testbed/django__django/tests/staticfiles_tests/urls/default.py @@ -0,0 +1,6 @@ +from django.contrib.staticfiles import views +from django.urls import re_path + +urlpatterns = [ + re_path("^static/(?P.*)$", views.serve), +] diff --git a/testbed/django__django/tests/staticfiles_tests/urls/helper.py b/testbed/django__django/tests/staticfiles_tests/urls/helper.py new file mode 100644 index 0000000000000000000000000000000000000000..e4951d1afbe36277e67b46c07ffad21b3e5d4961 --- /dev/null +++ 
b/testbed/django__django/tests/staticfiles_tests/urls/helper.py @@ -0,0 +1,3 @@ +from django.contrib.staticfiles.urls import staticfiles_urlpatterns + +urlpatterns = staticfiles_urlpatterns() diff --git a/testbed/django__django/tests/str/__init__.py b/testbed/django__django/tests/str/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/str/models.py b/testbed/django__django/tests/str/models.py new file mode 100644 index 0000000000000000000000000000000000000000..d2f7a45c20457ac7a1e72c6c259455c01070fc98 --- /dev/null +++ b/testbed/django__django/tests/str/models.py @@ -0,0 +1,19 @@ +""" +Adding __str__() to models + +Although it's not a strict requirement, each model should have a ``__str__()`` +method to return a "human-readable" representation of the object. Do this not +only for your own sanity when dealing with the interactive prompt, but also +because objects' representations are used throughout Django's +automatically-generated admin. 
+""" + +from django.db import models + + +class InternationalArticle(models.Model): + headline = models.CharField(max_length=100) + pub_date = models.DateTimeField() + + def __str__(self): + return self.headline diff --git a/testbed/django__django/tests/str/tests.py b/testbed/django__django/tests/str/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..6213af63cda255348be3e0cd4f2680ab2bcc5261 --- /dev/null +++ b/testbed/django__django/tests/str/tests.py @@ -0,0 +1,37 @@ +import datetime + +from django.db import models +from django.test import TestCase +from django.test.utils import isolate_apps + +from .models import InternationalArticle + + +class SimpleTests(TestCase): + def test_international(self): + a = InternationalArticle.objects.create( + headline="Girl wins €12.500 in lottery", + pub_date=datetime.datetime(2005, 7, 28), + ) + self.assertEqual(str(a), "Girl wins €12.500 in lottery") + + @isolate_apps("str") + def test_defaults(self): + """ + The default implementation of __str__ and __repr__ should return + instances of str. + """ + + class Default(models.Model): + pass + + obj = Default() + # Explicit call to __str__/__repr__ to make sure str()/repr() don't + # coerce the returned value. 
+ self.assertIsInstance(obj.__str__(), str) + self.assertIsInstance(obj.__repr__(), str) + self.assertEqual(str(obj), "Default object (None)") + self.assertEqual(repr(obj), "<Default: Default object (None)>") + obj2 = Default(pk=100) + self.assertEqual(str(obj2), "Default object (100)") + self.assertEqual(repr(obj2), "<Default: Default object (100)>") diff --git a/testbed/django__django/tests/string_lookup/__init__.py b/testbed/django__django/tests/string_lookup/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/string_lookup/models.py b/testbed/django__django/tests/string_lookup/models.py new file mode 100644 index 0000000000000000000000000000000000000000..71510f5b2fb1732b0c5e97f4b1f5f22c759b4674 --- /dev/null +++ b/testbed/django__django/tests/string_lookup/models.py @@ -0,0 +1,32 @@ +from django.db import models + + +class Foo(models.Model): + name = models.CharField(max_length=50) + friend = models.CharField(max_length=50, blank=True) + + +class Bar(models.Model): + name = models.CharField(max_length=50) + normal = models.ForeignKey(Foo, models.CASCADE, related_name="normal_foo") + fwd = models.ForeignKey("Whiz", models.CASCADE) + back = models.ForeignKey("Foo", models.CASCADE) + + +class Whiz(models.Model): + name = models.CharField(max_length=50) + + +class Child(models.Model): + parent = models.OneToOneField("Base", models.CASCADE) + name = models.CharField(max_length=50) + + +class Base(models.Model): + name = models.CharField(max_length=50) + + +class Article(models.Model): + name = models.CharField(max_length=50) + text = models.TextField() + submitted_from = models.GenericIPAddressField(blank=True, null=True) diff --git a/testbed/django__django/tests/string_lookup/tests.py b/testbed/django__django/tests/string_lookup/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..cc7d36061ac67045342dd64ed30139b30c7abefc --- /dev/null +++ b/testbed/django__django/tests/string_lookup/tests.py 
@@ -0,0 +1,85 @@ +from django.test import TestCase + +from .models import Article, Bar, Base, Child, Foo, Whiz + + +class StringLookupTests(TestCase): + def test_string_form_referencing(self): + """ + Regression test for #1661 and #1662 + + String form referencing of models works, both as pre and post + reference, on all RelatedField types. + """ + + f1 = Foo(name="Foo1") + f1.save() + f2 = Foo(name="Foo2") + f2.save() + + w1 = Whiz(name="Whiz1") + w1.save() + + b1 = Bar(name="Bar1", normal=f1, fwd=w1, back=f2) + b1.save() + + self.assertEqual(b1.normal, f1) + + self.assertEqual(b1.fwd, w1) + + self.assertEqual(b1.back, f2) + + base1 = Base(name="Base1") + base1.save() + + child1 = Child(name="Child1", parent=base1) + child1.save() + + self.assertEqual(child1.parent, base1) + + def test_unicode_chars_in_queries(self): + """ + Regression tests for #3937 + + make sure we can use unicode characters in queries. + If these tests fail on MySQL, it's a problem with the test setup. + A properly configured UTF-8 database can handle this. + """ + + fx = Foo(name="Bjorn", friend="François") + fx.save() + self.assertEqual(Foo.objects.get(friend__contains="\xe7"), fx) + + def test_queries_on_textfields(self): + """ + Regression tests for #5087 + + make sure we can perform queries on TextFields. + """ + + a = Article(name="Test", text="The quick brown fox jumps over the lazy dog.") + a.save() + self.assertEqual( + Article.objects.get( + text__exact="The quick brown fox jumps over the lazy dog." + ), + a, + ) + + self.assertEqual(Article.objects.get(text__contains="quick brown fox"), a) + + def test_ipaddress_on_postgresql(self): + """ + Regression test for #708 + + "like" queries on IP address fields require casting with HOST() (on PostgreSQL). 
+ """ + a = Article(name="IP test", text="The body", submitted_from="192.0.2.100") + a.save() + self.assertSequenceEqual( + Article.objects.filter(submitted_from__contains="192.0.2"), [a] + ) + # The searches do not match the subnet mask (/32 in this case) + self.assertEqual( + Article.objects.filter(submitted_from__contains="32").count(), 0 + ) diff --git a/testbed/django__django/tests/swappable_models/__init__.py b/testbed/django__django/tests/swappable_models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/swappable_models/models.py b/testbed/django__django/tests/swappable_models/models.py new file mode 100644 index 0000000000000000000000000000000000000000..02a1eb99c0c1db077c1a21dcdc0e0d0e6972f724 --- /dev/null +++ b/testbed/django__django/tests/swappable_models/models.py @@ -0,0 +1,15 @@ +from django.db import models + + +class Article(models.Model): + title = models.CharField(max_length=100) + publication_date = models.DateField() + + class Meta: + swappable = "TEST_ARTICLE_MODEL" + + +class AlternateArticle(models.Model): + title = models.CharField(max_length=100) + publication_date = models.DateField() + byline = models.CharField(max_length=100) diff --git a/testbed/django__django/tests/swappable_models/tests.py b/testbed/django__django/tests/swappable_models/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..d2c28b20b11132e392cd0c09961d104fd4eedfd5 --- /dev/null +++ b/testbed/django__django/tests/swappable_models/tests.py @@ -0,0 +1,45 @@ +from django.contrib.auth.models import Permission +from django.contrib.contenttypes.models import ContentType +from django.core import management +from django.test import TestCase, override_settings + +from .models import Article + + +class SwappableModelTests(TestCase): + # Limit memory usage when calling 'migrate'. 
+ available_apps = [ + "swappable_models", + "django.contrib.auth", + "django.contrib.contenttypes", + ] + + @override_settings(TEST_ARTICLE_MODEL="swappable_models.AlternateArticle") + def test_generated_data(self): + "Permissions and content types are not created for a swapped model" + + # Delete all permissions and content_types + Permission.objects.filter(content_type__app_label="swappable_models").delete() + ContentType.objects.filter(app_label="swappable_models").delete() + + # Re-run migrate. This will re-build the permissions and content types. + management.call_command("migrate", interactive=False, verbosity=0) + + # Content types and permissions exist for the swapped model, + # but not for the swappable model. + apps_models = [ + (p.content_type.app_label, p.content_type.model) + for p in Permission.objects.all() + ] + self.assertIn(("swappable_models", "alternatearticle"), apps_models) + self.assertNotIn(("swappable_models", "article"), apps_models) + + apps_models = [(ct.app_label, ct.model) for ct in ContentType.objects.all()] + self.assertIn(("swappable_models", "alternatearticle"), apps_models) + self.assertNotIn(("swappable_models", "article"), apps_models) + + @override_settings(TEST_ARTICLE_MODEL="swappable_models.article") + def test_case_insensitive(self): + "Model names are case insensitive. Model swapping honors this." 
+ Article.objects.all() + self.assertIsNone(Article._meta.swapped) diff --git a/testbed/django__django/tests/syndication_tests/__init__.py b/testbed/django__django/tests/syndication_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/syndication_tests/feeds.py b/testbed/django__django/tests/syndication_tests/feeds.py new file mode 100644 index 0000000000000000000000000000000000000000..a35dc29e20900bf550a7097e04b8d696b2e9fd06 --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/feeds.py @@ -0,0 +1,316 @@ +from functools import wraps + +from django.contrib.syndication import views +from django.utils import feedgenerator +from django.utils.timezone import get_fixed_timezone + +from .models import Article, Entry + + +def wraps_decorator(f): + @wraps(f) + def wrapper(*args, **kwargs): + value = f(*args, **kwargs) + return f"{value} -- decorated by @wraps." + + return wrapper + + +def common_decorator(f): + def wrapper(*args, **kwargs): + value = f(*args, **kwargs) + return f"{value} -- common decorated." + + return wrapper + + +class TestRss2Feed(views.Feed): + title = "My blog" + description = "A more thorough description of my blog." 
+ link = "/blog/" + feed_guid = "/foo/bar/1234" + author_name = "Sally Smith" + author_email = "test@example.com" + author_link = "http://www.example.com/" + categories = ("python", "django") + feed_copyright = "Copyright (c) 2007, Sally Smith" + ttl = 600 + + def items(self): + return Entry.objects.all() + + def item_description(self, item): + return "Overridden description: %s" % item + + def item_pubdate(self, item): + return item.published + + def item_updateddate(self, item): + return item.updated + + def item_comments(self, item): + return "%scomments" % item.get_absolute_url() + + item_author_name = "Sally Smith" + item_author_email = "test@example.com" + item_author_link = "http://www.example.com/" + item_categories = ("python", "testing") + item_copyright = "Copyright (c) 2007, Sally Smith" + + +class TestRss2FeedWithCallableObject(TestRss2Feed): + class TimeToLive: + def __call__(self): + return 700 + + ttl = TimeToLive() + + +class TestRss2FeedWithDecoratedMethod(TestRss2Feed): + class TimeToLive: + @wraps_decorator + def __call__(self): + return 800 + + @staticmethod + @wraps_decorator + def feed_copyright(): + return "Copyright (c) 2022, John Doe" + + ttl = TimeToLive() + + @staticmethod + def categories(): + return ("javascript", "vue") + + @wraps_decorator + def title(self): + return "Overridden title" + + @wraps_decorator + def item_title(self, item): + return f"Overridden item title: {item.title}" + + @wraps_decorator + def description(self, obj): + return "Overridden description" + + @wraps_decorator + def item_description(self): + return "Overridden item description" + + +class TestRss2FeedWithWrongDecoratedMethod(TestRss2Feed): + @common_decorator + def item_description(self, item): + return f"Overridden item description: {item.title}" + + +class TestRss2FeedWithGuidIsPermaLinkTrue(TestRss2Feed): + def item_guid_is_permalink(self, item): + return True + + +class TestRss2FeedWithGuidIsPermaLinkFalse(TestRss2Feed): + def item_guid(self, item): + 
return str(item.pk) + + def item_guid_is_permalink(self, item): + return False + + +class TestRss091Feed(TestRss2Feed): + feed_type = feedgenerator.RssUserland091Feed + + +class TestNoPubdateFeed(views.Feed): + title = "Test feed" + link = "/feed/" + + def items(self): + return Entry.objects.all() + + +class TestAtomFeed(TestRss2Feed): + feed_type = feedgenerator.Atom1Feed + subtitle = TestRss2Feed.description + + +class TestLatestFeed(TestRss2Feed): + """ + A feed where the latest entry date is an `updated` element. + """ + + feed_type = feedgenerator.Atom1Feed + subtitle = TestRss2Feed.description + + def items(self): + return Entry.objects.exclude(title="My last entry") + + +class ArticlesFeed(TestRss2Feed): + """ + A feed to test no link being defined. Articles have no get_absolute_url() + method, and item_link() is not defined. + """ + + def items(self): + return Article.objects.all() + + +class TestSingleEnclosureRSSFeed(TestRss2Feed): + """ + A feed to test that RSS feeds work with a single enclosure. + """ + + def item_enclosure_url(self, item): + return "http://example.com" + + def item_enclosure_size(self, item): + return 0 + + def item_mime_type(self, item): + return "image/png" + + +class TestMultipleEnclosureRSSFeed(TestRss2Feed): + """ + A feed to test that RSS feeds raise an exception with multiple enclosures. + """ + + def item_enclosures(self, item): + return [ + feedgenerator.Enclosure("http://example.com/hello.png", 0, "image/png"), + feedgenerator.Enclosure("http://example.com/goodbye.png", 0, "image/png"), + ] + + +class TemplateFeed(TestRss2Feed): + """ + A feed to test defining item titles and descriptions with templates. 
+ """ + + title_template = "syndication/title.html" + description_template = "syndication/description.html" + + # Defining a template overrides any item_title definition + def item_title(self): + return "Not in a template" + + +class TemplateContextFeed(TestRss2Feed): + """ + A feed to test custom context data in templates for title or description. + """ + + title_template = "syndication/title_context.html" + description_template = "syndication/description_context.html" + + def get_context_data(self, **kwargs): + context = super().get_context_data(**kwargs) + context["foo"] = "bar" + return context + + +class TestLanguageFeed(TestRss2Feed): + language = "de" + + +class TestGetObjectFeed(TestRss2Feed): + def get_object(self, request, entry_id): + return Entry.objects.get(pk=entry_id) + + def items(self, obj): + return Article.objects.filter(entry=obj) + + def item_link(self, item): + return "%sarticle/%s/" % (item.entry.get_absolute_url(), item.pk) + + def item_comments(self, item): + return "%scomments" % self.item_link(item) + + def item_description(self, item): + return "Article description: %s" % item.title + + def item_title(self, item): + return "Title: %s" % item.title + + +class NaiveDatesFeed(TestAtomFeed): + """ + A feed with naive (non-timezone-aware) dates. + """ + + def item_pubdate(self, item): + return item.published + + +class TZAwareDatesFeed(TestAtomFeed): + """ + A feed with timezone-aware dates. + """ + + def item_pubdate(self, item): + # Provide a weird offset so that the test can know it's getting this + # specific offset and not accidentally getting on from + # settings.TIME_ZONE. + return item.published.replace(tzinfo=get_fixed_timezone(42)) + + +class TestFeedUrlFeed(TestAtomFeed): + feed_url = "http://example.com/customfeedurl/" + + +class MyCustomAtom1Feed(feedgenerator.Atom1Feed): + """ + Test of a custom feed generator class. 
+ """ + + def root_attributes(self): + attrs = super().root_attributes() + attrs["django"] = "rocks" + return attrs + + def add_root_elements(self, handler): + super().add_root_elements(handler) + handler.addQuickElement("spam", "eggs") + + def item_attributes(self, item): + attrs = super().item_attributes(item) + attrs["bacon"] = "yum" + return attrs + + def add_item_elements(self, handler, item): + super().add_item_elements(handler, item) + handler.addQuickElement("ministry", "silly walks") + + +class TestCustomFeed(TestAtomFeed): + feed_type = MyCustomAtom1Feed + + +class TestSingleEnclosureAtomFeed(TestAtomFeed): + """ + A feed to test that Atom feeds work with a single enclosure. + """ + + def item_enclosure_url(self, item): + return "http://example.com" + + def item_enclosure_size(self, item): + return 0 + + def item_mime_type(self, item): + return "image/png" + + +class TestMultipleEnclosureAtomFeed(TestAtomFeed): + """ + A feed to test that Atom feeds work with multiple enclosures. 
+ """ + + def item_enclosures(self, item): + return [ + feedgenerator.Enclosure("http://example.com/hello.png", "0", "image/png"), + feedgenerator.Enclosure("http://example.com/goodbye.png", "0", "image/png"), + ] diff --git a/testbed/django__django/tests/syndication_tests/models.py b/testbed/django__django/tests/syndication_tests/models.py new file mode 100644 index 0000000000000000000000000000000000000000..9bf1799c82fd5341336b92a6fbd44a6de81ed6b1 --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/models.py @@ -0,0 +1,26 @@ +from django.db import models + + +class Entry(models.Model): + title = models.CharField(max_length=200) + updated = models.DateTimeField() + published = models.DateTimeField() + + class Meta: + ordering = ("updated",) + + def __str__(self): + return self.title + + def get_absolute_url(self): + return "/blog/%s/" % self.pk + + +class Article(models.Model): + title = models.CharField(max_length=200) + entry = models.ForeignKey(Entry, models.CASCADE) + updated = models.DateTimeField() + published = models.DateTimeField() + + class Meta: + ordering = ["updated"] diff --git a/testbed/django__django/tests/syndication_tests/templates/syndication/description.html b/testbed/django__django/tests/syndication_tests/templates/syndication/description.html new file mode 100644 index 0000000000000000000000000000000000000000..850e85e9524b01523802184a2663181e302d68c1 --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/templates/syndication/description.html @@ -0,0 +1 @@ +Description in your templates: {{ obj }} diff --git a/testbed/django__django/tests/syndication_tests/templates/syndication/description_context.html b/testbed/django__django/tests/syndication_tests/templates/syndication/description_context.html new file mode 100644 index 0000000000000000000000000000000000000000..b8872fd637d9675365ba856de6537fbfd8e5d8d9 --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/templates/syndication/description_context.html 
@@ -0,0 +1 @@ +{{ obj }} (foo is {{ foo }}) diff --git a/testbed/django__django/tests/syndication_tests/templates/syndication/title.html b/testbed/django__django/tests/syndication_tests/templates/syndication/title.html new file mode 100644 index 0000000000000000000000000000000000000000..57744ec49cf4ddf0c4bf338ddb817957ee767b89 --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/templates/syndication/title.html @@ -0,0 +1 @@ +Title in your templates: {{ obj }} diff --git a/testbed/django__django/tests/syndication_tests/templates/syndication/title_context.html b/testbed/django__django/tests/syndication_tests/templates/syndication/title_context.html new file mode 100644 index 0000000000000000000000000000000000000000..b8872fd637d9675365ba856de6537fbfd8e5d8d9 --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/templates/syndication/title_context.html @@ -0,0 +1 @@ +{{ obj }} (foo is {{ foo }}) diff --git a/testbed/django__django/tests/syndication_tests/tests.py b/testbed/django__django/tests/syndication_tests/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..a68ed879db8535deb0ee9057bef63b5e661a898a --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/tests.py @@ -0,0 +1,722 @@ +import datetime +from xml.dom import minidom + +from django.contrib.sites.models import Site +from django.contrib.syndication import views +from django.core.exceptions import ImproperlyConfigured +from django.test import TestCase, override_settings +from django.test.utils import requires_tz_support +from django.utils import timezone +from django.utils.feedgenerator import ( + Atom1Feed, + Rss201rev2Feed, + rfc2822_date, + rfc3339_date, +) + +from .models import Article, Entry + +TZ = timezone.get_default_timezone() + + +class FeedTestCase(TestCase): + @classmethod + def setUpTestData(cls): + cls.e1 = Entry.objects.create( + title="My first entry", + updated=datetime.datetime(1980, 1, 1, 12, 30), + 
published=datetime.datetime(1986, 9, 25, 20, 15, 00), + ) + cls.e2 = Entry.objects.create( + title="My second entry", + updated=datetime.datetime(2008, 1, 2, 12, 30), + published=datetime.datetime(2006, 3, 17, 18, 0), + ) + cls.e3 = Entry.objects.create( + title="My third entry", + updated=datetime.datetime(2008, 1, 2, 13, 30), + published=datetime.datetime(2005, 6, 14, 10, 45), + ) + cls.e4 = Entry.objects.create( + title="A & B < C > D", + updated=datetime.datetime(2008, 1, 3, 13, 30), + published=datetime.datetime(2005, 11, 25, 12, 11, 23), + ) + cls.e5 = Entry.objects.create( + title="My last entry", + updated=datetime.datetime(2013, 1, 20, 0, 0), + published=datetime.datetime(2013, 3, 25, 20, 0), + ) + cls.a1 = Article.objects.create( + title="My first article", + entry=cls.e1, + updated=datetime.datetime(1986, 11, 21, 9, 12, 18), + published=datetime.datetime(1986, 10, 21, 9, 12, 18), + ) + + def assertChildNodes(self, elem, expected): + actual = {n.nodeName for n in elem.childNodes} + expected = set(expected) + self.assertEqual(actual, expected) + + def assertChildNodeContent(self, elem, expected): + for k, v in expected.items(): + self.assertEqual(elem.getElementsByTagName(k)[0].firstChild.wholeText, v) + + def assertCategories(self, elem, expected): + self.assertEqual( + { + i.firstChild.wholeText + for i in elem.childNodes + if i.nodeName == "category" + }, + set(expected), + ) + + +@override_settings(ROOT_URLCONF="syndication_tests.urls") +class SyndicationFeedTest(FeedTestCase): + """ + Tests for the high-level syndication feed framework. + """ + + @classmethod + def setUpClass(cls): + super().setUpClass() + # This cleanup is necessary because contrib.sites cache + # makes tests interfere with each other, see #11505 + Site.objects.clear_cache() + + def test_rss2_feed(self): + """ + Test the structure and content of feeds generated by Rss201rev2Feed. 
+ """ + response = self.client.get("/syndication/rss2/") + doc = minidom.parseString(response.content) + + # Making sure there's only 1 `rss` element and that the correct + # RSS version was specified. + feed_elem = doc.getElementsByTagName("rss") + self.assertEqual(len(feed_elem), 1) + feed = feed_elem[0] + self.assertEqual(feed.getAttribute("version"), "2.0") + self.assertEqual( + feed.getElementsByTagName("language")[0].firstChild.nodeValue, "en" + ) + + # Making sure there's only one `channel` element w/in the + # `rss` element. + chan_elem = feed.getElementsByTagName("channel") + self.assertEqual(len(chan_elem), 1) + chan = chan_elem[0] + + # Find the last build date + d = Entry.objects.latest("published").published + last_build_date = rfc2822_date(timezone.make_aware(d, TZ)) + + self.assertChildNodes( + chan, + [ + "title", + "link", + "description", + "language", + "lastBuildDate", + "item", + "atom:link", + "ttl", + "copyright", + "category", + ], + ) + self.assertChildNodeContent( + chan, + { + "title": "My blog", + "description": "A more thorough description of my blog.", + "link": "http://example.com/blog/", + "language": "en", + "lastBuildDate": last_build_date, + "ttl": "600", + "copyright": "Copyright (c) 2007, Sally Smith", + }, + ) + self.assertCategories(chan, ["python", "django"]) + + # Ensure the content of the channel is correct + self.assertChildNodeContent( + chan, + { + "title": "My blog", + "link": "http://example.com/blog/", + }, + ) + + # Check feed_url is passed + self.assertEqual( + chan.getElementsByTagName("atom:link")[0].getAttribute("href"), + "http://example.com/syndication/rss2/", + ) + + # Find the pubdate of the first feed item + d = Entry.objects.get(pk=self.e1.pk).published + pub_date = rfc2822_date(timezone.make_aware(d, TZ)) + + items = chan.getElementsByTagName("item") + self.assertEqual(len(items), Entry.objects.count()) + self.assertChildNodeContent( + items[0], + { + "title": "My first entry", + "description": "Overridden 
description: My first entry", + "link": "http://example.com/blog/%s/" % self.e1.pk, + "guid": "http://example.com/blog/%s/" % self.e1.pk, + "pubDate": pub_date, + "author": "test@example.com (Sally Smith)", + "comments": "/blog/%s/comments" % self.e1.pk, + }, + ) + self.assertCategories(items[0], ["python", "testing"]) + for item in items: + self.assertChildNodes( + item, + [ + "title", + "link", + "description", + "guid", + "category", + "pubDate", + "author", + "comments", + ], + ) + # Assert that does not have any 'isPermaLink' attribute + self.assertIsNone( + item.getElementsByTagName("guid")[0].attributes.get("isPermaLink") + ) + + def test_rss2_feed_with_callable_object(self): + response = self.client.get("/syndication/rss2/with-callable-object/") + doc = minidom.parseString(response.content) + chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0] + self.assertChildNodeContent(chan, {"ttl": "700"}) + + def test_rss2_feed_with_decorated_methods(self): + response = self.client.get("/syndication/rss2/with-decorated-methods/") + doc = minidom.parseString(response.content) + chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0] + self.assertCategories(chan, ["javascript", "vue"]) + self.assertChildNodeContent( + chan, + { + "title": "Overridden title -- decorated by @wraps.", + "description": "Overridden description -- decorated by @wraps.", + "ttl": "800 -- decorated by @wraps.", + "copyright": "Copyright (c) 2022, John Doe -- decorated by @wraps.", + }, + ) + items = chan.getElementsByTagName("item") + self.assertChildNodeContent( + items[0], + { + "title": ( + f"Overridden item title: {self.e1.title} -- decorated by @wraps." + ), + "description": "Overridden item description -- decorated by @wraps.", + }, + ) + + def test_rss2_feed_with_wrong_decorated_methods(self): + msg = ( + "Feed method 'item_description' decorated by 'wrapper' needs to use " + "@functools.wraps." 
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + self.client.get("/syndication/rss2/with-wrong-decorated-methods/") + + def test_rss2_feed_guid_permalink_false(self): + """ + Test if the 'isPermaLink' attribute of element of an item + in the RSS feed is 'false'. + """ + response = self.client.get("/syndication/rss2/guid_ispermalink_false/") + doc = minidom.parseString(response.content) + chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0] + items = chan.getElementsByTagName("item") + for item in items: + self.assertEqual( + item.getElementsByTagName("guid")[0] + .attributes.get("isPermaLink") + .value, + "false", + ) + + def test_rss2_feed_guid_permalink_true(self): + """ + Test if the 'isPermaLink' attribute of element of an item + in the RSS feed is 'true'. + """ + response = self.client.get("/syndication/rss2/guid_ispermalink_true/") + doc = minidom.parseString(response.content) + chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0] + items = chan.getElementsByTagName("item") + for item in items: + self.assertEqual( + item.getElementsByTagName("guid")[0] + .attributes.get("isPermaLink") + .value, + "true", + ) + + def test_rss2_single_enclosure(self): + response = self.client.get("/syndication/rss2/single-enclosure/") + doc = minidom.parseString(response.content) + chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0] + items = chan.getElementsByTagName("item") + for item in items: + enclosures = item.getElementsByTagName("enclosure") + self.assertEqual(len(enclosures), 1) + + def test_rss2_multiple_enclosures(self): + with self.assertRaisesMessage( + ValueError, + "RSS feed items may only have one enclosure, see " + "http://www.rssboard.org/rss-profile#element-channel-item-enclosure", + ): + self.client.get("/syndication/rss2/multiple-enclosure/") + + def test_rss091_feed(self): + """ + Test the structure and content of feeds generated by RssUserland091Feed. 
+ """ + response = self.client.get("/syndication/rss091/") + doc = minidom.parseString(response.content) + + # Making sure there's only 1 `rss` element and that the correct + # RSS version was specified. + feed_elem = doc.getElementsByTagName("rss") + self.assertEqual(len(feed_elem), 1) + feed = feed_elem[0] + self.assertEqual(feed.getAttribute("version"), "0.91") + + # Making sure there's only one `channel` element w/in the + # `rss` element. + chan_elem = feed.getElementsByTagName("channel") + self.assertEqual(len(chan_elem), 1) + chan = chan_elem[0] + self.assertChildNodes( + chan, + [ + "title", + "link", + "description", + "language", + "lastBuildDate", + "item", + "atom:link", + "ttl", + "copyright", + "category", + ], + ) + + # Ensure the content of the channel is correct + self.assertChildNodeContent( + chan, + { + "title": "My blog", + "link": "http://example.com/blog/", + }, + ) + self.assertCategories(chan, ["python", "django"]) + + # Check feed_url is passed + self.assertEqual( + chan.getElementsByTagName("atom:link")[0].getAttribute("href"), + "http://example.com/syndication/rss091/", + ) + + items = chan.getElementsByTagName("item") + self.assertEqual(len(items), Entry.objects.count()) + self.assertChildNodeContent( + items[0], + { + "title": "My first entry", + "description": "Overridden description: My first entry", + "link": "http://example.com/blog/%s/" % self.e1.pk, + }, + ) + for item in items: + self.assertChildNodes(item, ["title", "link", "description"]) + self.assertCategories(item, []) + + def test_atom_feed(self): + """ + Test the structure and content of feeds generated by Atom1Feed. 
+ """ + response = self.client.get("/syndication/atom/") + feed = minidom.parseString(response.content).firstChild + + self.assertEqual(feed.nodeName, "feed") + self.assertEqual(feed.getAttribute("xmlns"), "http://www.w3.org/2005/Atom") + self.assertChildNodes( + feed, + [ + "title", + "subtitle", + "link", + "id", + "updated", + "entry", + "rights", + "category", + "author", + ], + ) + for link in feed.getElementsByTagName("link"): + if link.getAttribute("rel") == "self": + self.assertEqual( + link.getAttribute("href"), "http://example.com/syndication/atom/" + ) + + entries = feed.getElementsByTagName("entry") + self.assertEqual(len(entries), Entry.objects.count()) + for entry in entries: + self.assertChildNodes( + entry, + [ + "title", + "link", + "id", + "summary", + "category", + "updated", + "published", + "rights", + "author", + ], + ) + summary = entry.getElementsByTagName("summary")[0] + self.assertEqual(summary.getAttribute("type"), "html") + + def test_atom_feed_published_and_updated_elements(self): + """ + The published and updated elements are not + the same and now adhere to RFC 4287. 
+ """ + response = self.client.get("/syndication/atom/") + feed = minidom.parseString(response.content).firstChild + entries = feed.getElementsByTagName("entry") + + published = entries[0].getElementsByTagName("published")[0].firstChild.wholeText + updated = entries[0].getElementsByTagName("updated")[0].firstChild.wholeText + + self.assertNotEqual(published, updated) + + def test_atom_single_enclosure(self): + response = self.client.get("/syndication/atom/single-enclosure/") + feed = minidom.parseString(response.content).firstChild + items = feed.getElementsByTagName("entry") + for item in items: + links = item.getElementsByTagName("link") + links = [link for link in links if link.getAttribute("rel") == "enclosure"] + self.assertEqual(len(links), 1) + + def test_atom_multiple_enclosures(self): + response = self.client.get("/syndication/atom/multiple-enclosure/") + feed = minidom.parseString(response.content).firstChild + items = feed.getElementsByTagName("entry") + for item in items: + links = item.getElementsByTagName("link") + links = [link for link in links if link.getAttribute("rel") == "enclosure"] + self.assertEqual(len(links), 2) + + def test_latest_post_date(self): + """ + Both the published and updated dates are + considered when determining the latest post date. 
+ """ + # this feed has a `published` element with the latest date + response = self.client.get("/syndication/atom/") + feed = minidom.parseString(response.content).firstChild + updated = feed.getElementsByTagName("updated")[0].firstChild.wholeText + + d = Entry.objects.latest("published").published + latest_published = rfc3339_date(timezone.make_aware(d, TZ)) + + self.assertEqual(updated, latest_published) + + # this feed has an `updated` element with the latest date + response = self.client.get("/syndication/latest/") + feed = minidom.parseString(response.content).firstChild + updated = feed.getElementsByTagName("updated")[0].firstChild.wholeText + + d = Entry.objects.exclude(title="My last entry").latest("updated").updated + latest_updated = rfc3339_date(timezone.make_aware(d, TZ)) + + self.assertEqual(updated, latest_updated) + + def test_custom_feed_generator(self): + response = self.client.get("/syndication/custom/") + feed = minidom.parseString(response.content).firstChild + + self.assertEqual(feed.nodeName, "feed") + self.assertEqual(feed.getAttribute("django"), "rocks") + self.assertChildNodes( + feed, + [ + "title", + "subtitle", + "link", + "id", + "updated", + "entry", + "spam", + "rights", + "category", + "author", + ], + ) + + entries = feed.getElementsByTagName("entry") + self.assertEqual(len(entries), Entry.objects.count()) + for entry in entries: + self.assertEqual(entry.getAttribute("bacon"), "yum") + self.assertChildNodes( + entry, + [ + "title", + "link", + "id", + "summary", + "ministry", + "rights", + "author", + "updated", + "published", + "category", + ], + ) + summary = entry.getElementsByTagName("summary")[0] + self.assertEqual(summary.getAttribute("type"), "html") + + def test_feed_generator_language_attribute(self): + response = self.client.get("/syndication/language/") + feed = minidom.parseString(response.content).firstChild + self.assertEqual( + feed.firstChild.getElementsByTagName("language")[0].firstChild.nodeValue, + "de", + ) + + 
def test_title_escaping(self): + """ + Titles are escaped correctly in RSS feeds. + """ + response = self.client.get("/syndication/rss2/") + doc = minidom.parseString(response.content) + for item in doc.getElementsByTagName("item"): + link = item.getElementsByTagName("link")[0] + if link.firstChild.wholeText == "http://example.com/blog/4/": + title = item.getElementsByTagName("title")[0] + self.assertEqual(title.firstChild.wholeText, "A & B < C > D") + + def test_naive_datetime_conversion(self): + """ + Datetimes are correctly converted to the local time zone. + """ + # Naive date times passed in get converted to the local time zone, so + # check the received zone offset against the local offset. + response = self.client.get("/syndication/naive-dates/") + doc = minidom.parseString(response.content) + updated = doc.getElementsByTagName("updated")[0].firstChild.wholeText + + d = Entry.objects.latest("published").published + latest = rfc3339_date(timezone.make_aware(d, TZ)) + + self.assertEqual(updated, latest) + + def test_aware_datetime_conversion(self): + """ + Datetimes with timezones don't get trodden on. + """ + response = self.client.get("/syndication/aware-dates/") + doc = minidom.parseString(response.content) + published = doc.getElementsByTagName("published")[0].firstChild.wholeText + self.assertEqual(published[-6:], "+00:42") + + def test_feed_no_content_self_closing_tag(self): + tests = [ + (Atom1Feed, "link"), + (Rss201rev2Feed, "atom:link"), + ] + for feedgenerator, tag in tests: + with self.subTest(feedgenerator=feedgenerator.__name__): + feed = feedgenerator( + title="title", + link="https://example.com", + description="self closing tags test", + feed_url="https://feed.url.com", + ) + doc = feed.writeString("utf-8") + self.assertIn(f'<{tag} href="https://feed.url.com" rel="self"/>', doc) + + @requires_tz_support + def test_feed_last_modified_time_naive_date(self): + """ + Tests the Last-Modified header with naive publication dates. 
+ """ + response = self.client.get("/syndication/naive-dates/") + self.assertEqual( + response.headers["Last-Modified"], "Tue, 26 Mar 2013 01:00:00 GMT" + ) + + def test_feed_last_modified_time(self): + """ + Tests the Last-Modified header with aware publication dates. + """ + response = self.client.get("/syndication/aware-dates/") + self.assertEqual( + response.headers["Last-Modified"], "Mon, 25 Mar 2013 19:18:00 GMT" + ) + + # No last-modified when feed has no item_pubdate + response = self.client.get("/syndication/no_pubdate/") + self.assertFalse(response.has_header("Last-Modified")) + + def test_feed_url(self): + """ + The feed_url can be overridden. + """ + response = self.client.get("/syndication/feedurl/") + doc = minidom.parseString(response.content) + for link in doc.getElementsByTagName("link"): + if link.getAttribute("rel") == "self": + self.assertEqual( + link.getAttribute("href"), "http://example.com/customfeedurl/" + ) + + def test_secure_urls(self): + """ + Test URLs are prefixed with https:// when feed is requested over HTTPS. + """ + response = self.client.get( + "/syndication/rss2/", + **{ + "wsgi.url_scheme": "https", + }, + ) + doc = minidom.parseString(response.content) + chan = doc.getElementsByTagName("channel")[0] + self.assertEqual( + chan.getElementsByTagName("link")[0].firstChild.wholeText[0:5], "https" + ) + atom_link = chan.getElementsByTagName("atom:link")[0] + self.assertEqual(atom_link.getAttribute("href")[0:5], "https") + for link in doc.getElementsByTagName("link"): + if link.getAttribute("rel") == "self": + self.assertEqual(link.getAttribute("href")[0:5], "https") + + def test_item_link_error(self): + """ + An ImproperlyConfigured is raised if no link could be found for the + item(s). + """ + msg = ( + "Give your Article class a get_absolute_url() method, or define " + "an item_link() method in your Feed class." 
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + self.client.get("/syndication/articles/") + + def test_template_feed(self): + """ + The item title and description can be overridden with templates. + """ + response = self.client.get("/syndication/template/") + doc = minidom.parseString(response.content) + feed = doc.getElementsByTagName("rss")[0] + chan = feed.getElementsByTagName("channel")[0] + items = chan.getElementsByTagName("item") + + self.assertChildNodeContent( + items[0], + { + "title": "Title in your templates: My first entry\n", + "description": "Description in your templates: My first entry\n", + "link": "http://example.com/blog/%s/" % self.e1.pk, + }, + ) + + def test_template_context_feed(self): + """ + Custom context data can be passed to templates for title + and description. + """ + response = self.client.get("/syndication/template_context/") + doc = minidom.parseString(response.content) + feed = doc.getElementsByTagName("rss")[0] + chan = feed.getElementsByTagName("channel")[0] + items = chan.getElementsByTagName("item") + + self.assertChildNodeContent( + items[0], + { + "title": "My first entry (foo is bar)\n", + "description": "My first entry (foo is bar)\n", + }, + ) + + def test_add_domain(self): + """ + add_domain() prefixes domains onto the correct URLs. 
+ """ + prefix_domain_mapping = ( + (("example.com", "/foo/?arg=value"), "http://example.com/foo/?arg=value"), + ( + ("example.com", "/foo/?arg=value", True), + "https://example.com/foo/?arg=value", + ), + ( + ("example.com", "http://djangoproject.com/doc/"), + "http://djangoproject.com/doc/", + ), + ( + ("example.com", "https://djangoproject.com/doc/"), + "https://djangoproject.com/doc/", + ), + ( + ("example.com", "mailto:uhoh@djangoproject.com"), + "mailto:uhoh@djangoproject.com", + ), + ( + ("example.com", "//example.com/foo/?arg=value"), + "http://example.com/foo/?arg=value", + ), + ) + for prefix in prefix_domain_mapping: + with self.subTest(prefix=prefix): + self.assertEqual(views.add_domain(*prefix[0]), prefix[1]) + + def test_get_object(self): + response = self.client.get("/syndication/rss2/articles/%s/" % self.e1.pk) + doc = minidom.parseString(response.content) + feed = doc.getElementsByTagName("rss")[0] + chan = feed.getElementsByTagName("channel")[0] + items = chan.getElementsByTagName("item") + + self.assertChildNodeContent( + items[0], + { + "comments": "/blog/%s/article/%s/comments" % (self.e1.pk, self.a1.pk), + "description": "Article description: My first article", + "link": "http://example.com/blog/%s/article/%s/" + % (self.e1.pk, self.a1.pk), + "title": "Title: My first article", + "pubDate": rfc2822_date(timezone.make_aware(self.a1.published, TZ)), + }, + ) + + def test_get_non_existent_object(self): + response = self.client.get("/syndication/rss2/articles/0/") + self.assertEqual(response.status_code, 404) diff --git a/testbed/django__django/tests/syndication_tests/urls.py b/testbed/django__django/tests/syndication_tests/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..50f673373ec4b467ffcf92a2f6d0c13f775686dc --- /dev/null +++ b/testbed/django__django/tests/syndication_tests/urls.py @@ -0,0 +1,43 @@ +from django.urls import path + +from . 
import feeds + +urlpatterns = [ + path("syndication/rss2/", feeds.TestRss2Feed()), + path( + "syndication/rss2/with-callable-object/", feeds.TestRss2FeedWithCallableObject() + ), + path( + "syndication/rss2/with-decorated-methods/", + feeds.TestRss2FeedWithDecoratedMethod(), + ), + path( + "syndication/rss2/with-wrong-decorated-methods/", + feeds.TestRss2FeedWithWrongDecoratedMethod(), + ), + path("syndication/rss2/articles//", feeds.TestGetObjectFeed()), + path( + "syndication/rss2/guid_ispermalink_true/", + feeds.TestRss2FeedWithGuidIsPermaLinkTrue(), + ), + path( + "syndication/rss2/guid_ispermalink_false/", + feeds.TestRss2FeedWithGuidIsPermaLinkFalse(), + ), + path("syndication/rss091/", feeds.TestRss091Feed()), + path("syndication/no_pubdate/", feeds.TestNoPubdateFeed()), + path("syndication/atom/", feeds.TestAtomFeed()), + path("syndication/latest/", feeds.TestLatestFeed()), + path("syndication/custom/", feeds.TestCustomFeed()), + path("syndication/language/", feeds.TestLanguageFeed()), + path("syndication/naive-dates/", feeds.NaiveDatesFeed()), + path("syndication/aware-dates/", feeds.TZAwareDatesFeed()), + path("syndication/feedurl/", feeds.TestFeedUrlFeed()), + path("syndication/articles/", feeds.ArticlesFeed()), + path("syndication/template/", feeds.TemplateFeed()), + path("syndication/template_context/", feeds.TemplateContextFeed()), + path("syndication/rss2/single-enclosure/", feeds.TestSingleEnclosureRSSFeed()), + path("syndication/rss2/multiple-enclosure/", feeds.TestMultipleEnclosureRSSFeed()), + path("syndication/atom/single-enclosure/", feeds.TestSingleEnclosureAtomFeed()), + path("syndication/atom/multiple-enclosure/", feeds.TestMultipleEnclosureAtomFeed()), +] diff --git a/testbed/django__django/tests/template_backends/__init__.py b/testbed/django__django/tests/template_backends/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/testbed/django__django/tests/template_backends/apps/__init__.py b/testbed/django__django/tests/template_backends/apps/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_backends/apps/good/__init__.py b/testbed/django__django/tests/template_backends/apps/good/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_backends/apps/good/templatetags/__init__.py b/testbed/django__django/tests/template_backends/apps/good/templatetags/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_backends/apps/good/templatetags/empty.py b/testbed/django__django/tests/template_backends/apps/good/templatetags/empty.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_backends/apps/good/templatetags/good_tags.py b/testbed/django__django/tests/template_backends/apps/good/templatetags/good_tags.py new file mode 100644 index 0000000000000000000000000000000000000000..9bec93d8e50a9d43d05b378ba8c50b2b7dd1e658 --- /dev/null +++ b/testbed/django__django/tests/template_backends/apps/good/templatetags/good_tags.py @@ -0,0 +1,3 @@ +from django.template import Library + +register = Library() diff --git a/testbed/django__django/tests/template_backends/apps/good/templatetags/override.py b/testbed/django__django/tests/template_backends/apps/good/templatetags/override.py new file mode 100644 index 0000000000000000000000000000000000000000..9bec93d8e50a9d43d05b378ba8c50b2b7dd1e658 --- /dev/null +++ b/testbed/django__django/tests/template_backends/apps/good/templatetags/override.py @@ -0,0 +1,3 @@ +from django.template import Library + 
+register = Library() diff --git a/testbed/django__django/tests/template_backends/apps/good/templatetags/subpackage/__init__.py b/testbed/django__django/tests/template_backends/apps/good/templatetags/subpackage/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_backends/apps/good/templatetags/subpackage/tags.py b/testbed/django__django/tests/template_backends/apps/good/templatetags/subpackage/tags.py new file mode 100644 index 0000000000000000000000000000000000000000..9bec93d8e50a9d43d05b378ba8c50b2b7dd1e658 --- /dev/null +++ b/testbed/django__django/tests/template_backends/apps/good/templatetags/subpackage/tags.py @@ -0,0 +1,3 @@ +from django.template import Library + +register = Library() diff --git a/testbed/django__django/tests/template_backends/apps/importerror/__init__.py b/testbed/django__django/tests/template_backends/apps/importerror/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_backends/apps/importerror/templatetags/__init__.py b/testbed/django__django/tests/template_backends/apps/importerror/templatetags/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_backends/apps/importerror/templatetags/broken_tags.py b/testbed/django__django/tests/template_backends/apps/importerror/templatetags/broken_tags.py new file mode 100644 index 0000000000000000000000000000000000000000..4f240e592330d0adfb165a8994da2e98f633873e --- /dev/null +++ b/testbed/django__django/tests/template_backends/apps/importerror/templatetags/broken_tags.py @@ -0,0 +1 @@ +import DoesNotExist # noqa diff --git a/testbed/django__django/tests/template_backends/forbidden/template_backends/hello.html 
b/testbed/django__django/tests/template_backends/forbidden/template_backends/hello.html new file mode 100644 index 0000000000000000000000000000000000000000..14240a43ee9ba14c308d1745a5f0e00539bd0270 --- /dev/null +++ b/testbed/django__django/tests/template_backends/forbidden/template_backends/hello.html @@ -0,0 +1 @@ +Hu ho. diff --git a/testbed/django__django/tests/template_backends/jinja2/template_backends/csrf.html b/testbed/django__django/tests/template_backends/jinja2/template_backends/csrf.html new file mode 100644 index 0000000000000000000000000000000000000000..081577fe3ae2a6ad9ecf4b9fbb1b4feb42949b7c --- /dev/null +++ b/testbed/django__django/tests/template_backends/jinja2/template_backends/csrf.html @@ -0,0 +1 @@ +{{ csrf_input }} diff --git a/testbed/django__django/tests/template_backends/jinja2/template_backends/django_escaping.html b/testbed/django__django/tests/template_backends/jinja2/template_backends/django_escaping.html new file mode 100644 index 0000000000000000000000000000000000000000..a5ce51b109deb1ce436203c7e34427cd3b60ff51 --- /dev/null +++ b/testbed/django__django/tests/template_backends/jinja2/template_backends/django_escaping.html @@ -0,0 +1,5 @@ +{{ media }} + +{{ test_form }} + +{{ test_form.test_field }} diff --git a/testbed/django__django/tests/template_backends/jinja2/template_backends/hello.html b/testbed/django__django/tests/template_backends/jinja2/template_backends/hello.html new file mode 100644 index 0000000000000000000000000000000000000000..4ce626e9be1315b1487388c1eeac2e4da34a803e --- /dev/null +++ b/testbed/django__django/tests/template_backends/jinja2/template_backends/hello.html @@ -0,0 +1 @@ +Hello {{ name }}! 
diff --git a/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error.html b/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error.html new file mode 100644 index 0000000000000000000000000000000000000000..d4f7a578fc790e3ad7979c20fb0a12dea6215d91 --- /dev/null +++ b/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error.html @@ -0,0 +1 @@ +{% block %} diff --git a/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error2.html b/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error2.html new file mode 100644 index 0000000000000000000000000000000000000000..7b268bd30c536fe7710ef80722e819c5a715bd73 --- /dev/null +++ b/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error2.html @@ -0,0 +1,31 @@ +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +{% block %} +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 diff --git a/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error_include.html b/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error_include.html new file mode 100644 index 0000000000000000000000000000000000000000..e23b0e78d34475ea583e0806e79990aa81d9cba6 --- /dev/null +++ b/testbed/django__django/tests/template_backends/jinja2/template_backends/syntax_error_include.html @@ -0,0 +1 @@ +{% include "template_backends/syntax_error.html" %} diff --git a/testbed/django__django/tests/template_backends/template_strings/template_backends/csrf.html b/testbed/django__django/tests/template_backends/template_strings/template_backends/csrf.html new file mode 100644 index 0000000000000000000000000000000000000000..b1c144dcb7d9455cce092d3503426108077f6a2a --- /dev/null +++ b/testbed/django__django/tests/template_backends/template_strings/template_backends/csrf.html @@ -0,0 +1 @@ +$csrf_input diff --git 
a/testbed/django__django/tests/template_backends/template_strings/template_backends/hello.html b/testbed/django__django/tests/template_backends/template_strings/template_backends/hello.html new file mode 100644 index 0000000000000000000000000000000000000000..7a1b4446c73093e096fb03776403906f83b3abd6 --- /dev/null +++ b/testbed/django__django/tests/template_backends/template_strings/template_backends/hello.html @@ -0,0 +1 @@ +Hello $name! diff --git a/testbed/django__django/tests/template_backends/templates/template_backends/csrf.html b/testbed/django__django/tests/template_backends/templates/template_backends/csrf.html new file mode 100644 index 0000000000000000000000000000000000000000..a8367c4f081592f6e25b1c77c5f70a4f4811303a --- /dev/null +++ b/testbed/django__django/tests/template_backends/templates/template_backends/csrf.html @@ -0,0 +1 @@ +{% csrf_token %} diff --git a/testbed/django__django/tests/template_backends/templates/template_backends/django_escaping.html b/testbed/django__django/tests/template_backends/templates/template_backends/django_escaping.html new file mode 100644 index 0000000000000000000000000000000000000000..a5ce51b109deb1ce436203c7e34427cd3b60ff51 --- /dev/null +++ b/testbed/django__django/tests/template_backends/templates/template_backends/django_escaping.html @@ -0,0 +1,5 @@ +{{ media }} + +{{ test_form }} + +{{ test_form.test_field }} diff --git a/testbed/django__django/tests/template_backends/templates/template_backends/hello.html b/testbed/django__django/tests/template_backends/templates/template_backends/hello.html new file mode 100644 index 0000000000000000000000000000000000000000..4ce626e9be1315b1487388c1eeac2e4da34a803e --- /dev/null +++ b/testbed/django__django/tests/template_backends/templates/template_backends/hello.html @@ -0,0 +1 @@ +Hello {{ name }}! 
diff --git a/testbed/django__django/tests/template_backends/templates/template_backends/syntax_error.html b/testbed/django__django/tests/template_backends/templates/template_backends/syntax_error.html new file mode 100644 index 0000000000000000000000000000000000000000..d4f7a578fc790e3ad7979c20fb0a12dea6215d91 --- /dev/null +++ b/testbed/django__django/tests/template_backends/templates/template_backends/syntax_error.html @@ -0,0 +1 @@ +{% block %} diff --git a/testbed/django__django/tests/template_backends/test_django.py b/testbed/django__django/tests/template_backends/test_django.py new file mode 100644 index 0000000000000000000000000000000000000000..76f7a26db6c999c10a865e9abc901fc5b8e523b7 --- /dev/null +++ b/testbed/django__django/tests/template_backends/test_django.py @@ -0,0 +1,208 @@ +from pathlib import Path + +from template_tests.test_response import test_processor_name + +from django.template import Context, EngineHandler, RequestContext +from django.template.backends.django import DjangoTemplates +from django.template.library import InvalidTemplateLibrary +from django.test import RequestFactory, override_settings + +from .test_dummy import TemplateStringsTests + + +class DjangoTemplatesTests(TemplateStringsTests): + engine_class = DjangoTemplates + backend_name = "django" + request_factory = RequestFactory() + + def test_context_has_priority_over_template_context_processors(self): + # See ticket #23789. 
+ engine = DjangoTemplates( + { + "DIRS": [], + "APP_DIRS": False, + "NAME": "django", + "OPTIONS": { + "context_processors": [test_processor_name], + }, + } + ) + + template = engine.from_string("{{ processors }}") + request = self.request_factory.get("/") + + # Context processors run + content = template.render({}, request) + self.assertEqual(content, "yes") + + # Context overrides context processors + content = template.render({"processors": "no"}, request) + self.assertEqual(content, "no") + + def test_render_requires_dict(self): + """django.Template.render() requires a dict.""" + engine = DjangoTemplates( + { + "DIRS": [], + "APP_DIRS": False, + "NAME": "django", + "OPTIONS": {}, + } + ) + template = engine.from_string("") + context = Context() + request_context = RequestContext(self.request_factory.get("/"), {}) + msg = "context must be a dict rather than Context." + with self.assertRaisesMessage(TypeError, msg): + template.render(context) + msg = "context must be a dict rather than RequestContext." 
+ with self.assertRaisesMessage(TypeError, msg): + template.render(request_context) + + @override_settings(INSTALLED_APPS=["template_backends.apps.good"]) + def test_templatetag_discovery(self): + engine = DjangoTemplates( + { + "DIRS": [], + "APP_DIRS": False, + "NAME": "django", + "OPTIONS": { + "libraries": { + "alternate": ( + "template_backends.apps.good.templatetags.good_tags" + ), + "override": ( + "template_backends.apps.good.templatetags.good_tags" + ), + }, + }, + } + ) + + # libraries are discovered from installed applications + self.assertEqual( + engine.engine.libraries["good_tags"], + "template_backends.apps.good.templatetags.good_tags", + ) + self.assertEqual( + engine.engine.libraries["subpackage.tags"], + "template_backends.apps.good.templatetags.subpackage.tags", + ) + # libraries are discovered from django.templatetags + self.assertEqual( + engine.engine.libraries["static"], + "django.templatetags.static", + ) + # libraries passed in OPTIONS are registered + self.assertEqual( + engine.engine.libraries["alternate"], + "template_backends.apps.good.templatetags.good_tags", + ) + # libraries passed in OPTIONS take precedence over discovered ones + self.assertEqual( + engine.engine.libraries["override"], + "template_backends.apps.good.templatetags.good_tags", + ) + + @override_settings(INSTALLED_APPS=["template_backends.apps.importerror"]) + def test_templatetag_discovery_import_error(self): + """ + Import errors in tag modules should be reraised with a helpful message. 
+ """ + with self.assertRaisesMessage( + InvalidTemplateLibrary, + "ImportError raised when trying to load " + "'template_backends.apps.importerror.templatetags.broken_tags'", + ) as cm: + DjangoTemplates( + { + "DIRS": [], + "APP_DIRS": False, + "NAME": "django", + "OPTIONS": {}, + } + ) + self.assertIsInstance(cm.exception.__cause__, ImportError) + + def test_builtins_discovery(self): + engine = DjangoTemplates( + { + "DIRS": [], + "APP_DIRS": False, + "NAME": "django", + "OPTIONS": { + "builtins": ["template_backends.apps.good.templatetags.good_tags"], + }, + } + ) + + self.assertEqual( + engine.engine.builtins, + [ + "django.template.defaulttags", + "django.template.defaultfilters", + "django.template.loader_tags", + "template_backends.apps.good.templatetags.good_tags", + ], + ) + + def test_autoescape_off(self): + templates = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "OPTIONS": {"autoescape": False}, + } + ] + engines = EngineHandler(templates=templates) + self.assertEqual( + engines["django"] + .from_string("Hello, {{ name }}") + .render({"name": "Bob & Jim"}), + "Hello, Bob & Jim", + ) + + def test_autoescape_default(self): + templates = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + } + ] + engines = EngineHandler(templates=templates) + self.assertEqual( + engines["django"] + .from_string("Hello, {{ name }}") + .render({"name": "Bob & Jim"}), + "Hello, Bob & Jim", + ) + + def test_default_template_loaders(self): + """The cached template loader is always enabled by default.""" + for debug in (True, False): + with self.subTest(DEBUG=debug), self.settings(DEBUG=debug): + engine = DjangoTemplates( + {"DIRS": [], "APP_DIRS": True, "NAME": "django", "OPTIONS": {}} + ) + self.assertEqual( + engine.engine.loaders, + [ + ( + "django.template.loaders.cached.Loader", + [ + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", + ], + ) + ], + ) + + def 
test_dirs_pathlib(self): + engine = DjangoTemplates( + { + "DIRS": [Path(__file__).parent / "templates" / "template_backends"], + "APP_DIRS": False, + "NAME": "django", + "OPTIONS": {}, + } + ) + template = engine.get_template("hello.html") + self.assertEqual(template.render({"name": "Joe"}), "Hello Joe!\n") diff --git a/testbed/django__django/tests/template_backends/test_dummy.py b/testbed/django__django/tests/template_backends/test_dummy.py new file mode 100644 index 0000000000000000000000000000000000000000..a8f55373345a601343ecd847a23ec328900a7a04 --- /dev/null +++ b/testbed/django__django/tests/template_backends/test_dummy.py @@ -0,0 +1,108 @@ +import re + +from django.forms import CharField, Form, Media +from django.http import HttpRequest, HttpResponse +from django.middleware.csrf import ( + CSRF_TOKEN_LENGTH, + CsrfViewMiddleware, + _unmask_cipher_token, + get_token, +) +from django.template import TemplateDoesNotExist, TemplateSyntaxError +from django.template.backends.dummy import TemplateStrings +from django.test import SimpleTestCase + + +class TemplateStringsTests(SimpleTestCase): + engine_class = TemplateStrings + backend_name = "dummy" + options = {} + + @classmethod + def setUpClass(cls): + super().setUpClass() + params = { + "DIRS": [], + "APP_DIRS": True, + "NAME": cls.backend_name, + "OPTIONS": cls.options, + } + cls.engine = cls.engine_class(params) + + def test_from_string(self): + template = self.engine.from_string("Hello!\n") + content = template.render() + self.assertEqual(content, "Hello!\n") + + def test_get_template(self): + template = self.engine.get_template("template_backends/hello.html") + content = template.render({"name": "world"}) + self.assertEqual(content, "Hello world!\n") + + def test_get_template_nonexistent(self): + with self.assertRaises(TemplateDoesNotExist) as e: + self.engine.get_template("template_backends/nonexistent.html") + self.assertEqual(e.exception.backend, self.engine) + + def test_get_template_syntax_error(self): 
+ # There's no way to trigger a syntax error with the dummy backend. + # The test still lives here to factor it between other backends. + if self.backend_name == "dummy": + self.skipTest("test doesn't apply to dummy backend") + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("template_backends/syntax_error.html") + + def test_html_escaping(self): + template = self.engine.get_template("template_backends/hello.html") + context = {"name": ''} + content = template.render(context) + + self.assertIn("<script>", content) + self.assertNotIn(""), + "\\u003Cscript\\u003Eand this\\u003C/script\\u003E", + ) + + def test_paragraph_separator(self): + self.assertEqual( + escapejs_filter("paragraph separator:\u2029and line separator:\u2028"), + "paragraph separator:\\u2029and line separator:\\u2028", + ) + + def test_lazy_string(self): + append_script = lazy(lambda string: r"" + string, str) + self.assertEqual( + escapejs_filter(append_script("whitespace: \r\n\t\v\f\b")), + "\\u003Cscript\\u003Ethis\\u003C/script\\u003E" + "whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_escapeseq.py b/testbed/django__django/tests/template_tests/filter_tests/test_escapeseq.py new file mode 100644 index 0000000000000000000000000000000000000000..27092f5828bb6165c866738b60d2d81b60c67f16 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_escapeseq.py @@ -0,0 +1,59 @@ +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class EscapeseqTests(SimpleTestCase): + """ + The "escapeseq" filter works the same whether autoescape is on or off, + and has no effect on strings already marked as safe. 
+ """ + + @setup( + { + "escapeseq_basic": ( + '{{ a|escapeseq|join:", " }} -- {{ b|escapeseq|join:", " }}' + ), + } + ) + def test_basic(self): + output = self.engine.render_to_string( + "escapeseq_basic", + {"a": ["x&y", "

    "], "b": [mark_safe("x&y"), mark_safe("

    ")]}, + ) + self.assertEqual(output, "x&y, <p> -- x&y,

    ") + + @setup( + { + "escapeseq_autoescape_off": ( + '{% autoescape off %}{{ a|escapeseq|join:", " }}' + " -- " + '{{ b|escapeseq|join:", "}}{% endautoescape %}' + ) + } + ) + def test_autoescape_off(self): + output = self.engine.render_to_string( + "escapeseq_autoescape_off", + {"a": ["x&y", "

    "], "b": [mark_safe("x&y"), mark_safe("

    ")]}, + ) + self.assertEqual(output, "x&y, <p> -- x&y,

    ") + + @setup({"escapeseq_join": '{{ a|escapeseq|join:"
    " }}'}) + def test_chain_join(self): + output = self.engine.render_to_string("escapeseq_join", {"a": ["x&y", "

    "]}) + self.assertEqual(output, "x&y
    <p>") + + @setup( + { + "escapeseq_join_autoescape_off": ( + '{% autoescape off %}{{ a|escapeseq|join:"
    " }}{% endautoescape %}' + ), + } + ) + def test_chain_join_autoescape_off(self): + output = self.engine.render_to_string( + "escapeseq_join_autoescape_off", {"a": ["x&y", "

    "]} + ) + self.assertEqual(output, "x&y
    <p>") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_filesizeformat.py b/testbed/django__django/tests/template_tests/filter_tests/test_filesizeformat.py new file mode 100644 index 0000000000000000000000000000000000000000..fee742f9d5c518af0d2d37ad3c2b715e74489c06 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_filesizeformat.py @@ -0,0 +1,62 @@ +from django.template.defaultfilters import filesizeformat +from django.test import SimpleTestCase +from django.utils import translation + + +class FunctionTests(SimpleTestCase): + def test_formats(self): + tests = [ + (0, "0\xa0bytes"), + (1, "1\xa0byte"), + (1023, "1023\xa0bytes"), + (1024, "1.0\xa0KB"), + (10 * 1024, "10.0\xa0KB"), + (1024 * 1024 - 1, "1024.0\xa0KB"), + (1024 * 1024, "1.0\xa0MB"), + (1024 * 1024 * 50, "50.0\xa0MB"), + (1024 * 1024 * 1024 - 1, "1024.0\xa0MB"), + (1024 * 1024 * 1024, "1.0\xa0GB"), + (1024 * 1024 * 1024 * 1024, "1.0\xa0TB"), + (1024 * 1024 * 1024 * 1024 * 1024, "1.0\xa0PB"), + (1024 * 1024 * 1024 * 1024 * 1024 * 2000, "2000.0\xa0PB"), + (complex(1, -1), "0\xa0bytes"), + ("", "0\xa0bytes"), + ("\N{GREEK SMALL LETTER ALPHA}", "0\xa0bytes"), + ] + for value, expected in tests: + with self.subTest(value=value): + self.assertEqual(filesizeformat(value), expected) + + def test_localized_formats(self): + tests = [ + (0, "0\xa0Bytes"), + (1, "1\xa0Byte"), + (1023, "1023\xa0Bytes"), + (1024, "1,0\xa0KB"), + (10 * 1024, "10,0\xa0KB"), + (1024 * 1024 - 1, "1024,0\xa0KB"), + (1024 * 1024, "1,0\xa0MB"), + (1024 * 1024 * 50, "50,0\xa0MB"), + (1024 * 1024 * 1024 - 1, "1024,0\xa0MB"), + (1024 * 1024 * 1024, "1,0\xa0GB"), + (1024 * 1024 * 1024 * 1024, "1,0\xa0TB"), + (1024 * 1024 * 1024 * 1024 * 1024, "1,0\xa0PB"), + (1024 * 1024 * 1024 * 1024 * 1024 * 2000, "2000,0\xa0PB"), + (complex(1, -1), "0\xa0Bytes"), + ("", "0\xa0Bytes"), + ("\N{GREEK SMALL LETTER ALPHA}", "0\xa0Bytes"), + ] + with translation.override("de"): + for value, expected in 
tests: + with self.subTest(value=value): + self.assertEqual(filesizeformat(value), expected) + + def test_negative_numbers(self): + tests = [ + (-1, "-1\xa0byte"), + (-100, "-100\xa0bytes"), + (-1024 * 1024 * 50, "-50.0\xa0MB"), + ] + for value, expected in tests: + with self.subTest(value=value): + self.assertEqual(filesizeformat(value), expected) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_first.py b/testbed/django__django/tests/template_tests/filter_tests/test_first.py new file mode 100644 index 0000000000000000000000000000000000000000..224dec7b0fa56d62da274bc1108a6fd782eceb43 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_first.py @@ -0,0 +1,38 @@ +from django.template.defaultfilters import first +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class FirstTests(SimpleTestCase): + @setup({"first01": "{{ a|first }} {{ b|first }}"}) + def test_first01(self): + output = self.engine.render_to_string( + "first01", {"a": ["a&b", "x"], "b": [mark_safe("a&b"), "x"]} + ) + self.assertEqual(output, "a&b a&b") + + @setup( + { + "first02": ( + "{% autoescape off %}{{ a|first }} {{ b|first }}{% endautoescape %}" + ) + } + ) + def test_first02(self): + output = self.engine.render_to_string( + "first02", {"a": ["a&b", "x"], "b": [mark_safe("a&b"), "x"]} + ) + self.assertEqual(output, "a&b a&b") + + +class FunctionTests(SimpleTestCase): + def test_list(self): + self.assertEqual(first([0, 1, 2]), 0) + + def test_empty_string(self): + self.assertEqual(first(""), "") + + def test_string(self): + self.assertEqual(first("test"), "t") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_floatformat.py b/testbed/django__django/tests/template_tests/filter_tests/test_floatformat.py new file mode 100644 index 0000000000000000000000000000000000000000..db176223096237254ae5138b46128a8d9f9be28f --- /dev/null +++ 
b/testbed/django__django/tests/template_tests/filter_tests/test_floatformat.py @@ -0,0 +1,159 @@ +from decimal import Decimal, localcontext + +from django.template.defaultfilters import floatformat +from django.test import SimpleTestCase +from django.utils import translation +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class FloatformatTests(SimpleTestCase): + @setup( + { + "floatformat01": ( + "{% autoescape off %}{{ a|floatformat }} {{ b|floatformat }}" + "{% endautoescape %}" + ) + } + ) + def test_floatformat01(self): + output = self.engine.render_to_string( + "floatformat01", {"a": "1.42", "b": mark_safe("1.42")} + ) + self.assertEqual(output, "1.4 1.4") + + @setup({"floatformat02": "{{ a|floatformat }} {{ b|floatformat }}"}) + def test_floatformat02(self): + output = self.engine.render_to_string( + "floatformat02", {"a": "1.42", "b": mark_safe("1.42")} + ) + self.assertEqual(output, "1.4 1.4") + + +class FunctionTests(SimpleTestCase): + def test_inputs(self): + self.assertEqual(floatformat(7.7), "7.7") + self.assertEqual(floatformat(7.0), "7") + self.assertEqual(floatformat(0.7), "0.7") + self.assertEqual(floatformat(-0.7), "-0.7") + self.assertEqual(floatformat(0.07), "0.1") + self.assertEqual(floatformat(-0.07), "-0.1") + self.assertEqual(floatformat(0.007), "0.0") + self.assertEqual(floatformat(0.0), "0") + self.assertEqual(floatformat(7.7, 0), "8") + self.assertEqual(floatformat(7.7, 3), "7.700") + self.assertEqual(floatformat(6.000000, 3), "6.000") + self.assertEqual(floatformat(6.200000, 3), "6.200") + self.assertEqual(floatformat(6.200000, -3), "6.200") + self.assertEqual(floatformat(13.1031, -3), "13.103") + self.assertEqual(floatformat(11.1197, -2), "11.12") + self.assertEqual(floatformat(11.0000, -2), "11") + self.assertEqual(floatformat(11.000001, -2), "11.00") + self.assertEqual(floatformat(8.2798, 3), "8.280") + self.assertEqual(floatformat(5555.555, 2), "5555.56") + self.assertEqual(floatformat(001.3000, 2), 
"1.30") + self.assertEqual(floatformat(0.12345, 2), "0.12") + self.assertEqual(floatformat(Decimal("555.555"), 2), "555.56") + self.assertEqual(floatformat(Decimal("09.000")), "9") + self.assertEqual( + floatformat(Decimal("123456.123456789012345678901"), 21), + "123456.123456789012345678901", + ) + self.assertEqual(floatformat("foo"), "") + self.assertEqual(floatformat(13.1031, "bar"), "13.1031") + self.assertEqual(floatformat(18.125, 2), "18.13") + self.assertEqual(floatformat("foo", "bar"), "") + self.assertEqual(floatformat("¿Cómo esta usted?"), "") + self.assertEqual(floatformat(None), "") + self.assertEqual( + floatformat(-1.323297138040798e35, 2), + "-132329713804079800000000000000000000.00", + ) + self.assertEqual( + floatformat(-1.323297138040798e35, -2), + "-132329713804079800000000000000000000", + ) + self.assertEqual(floatformat(1.5e-15, 20), "0.00000000000000150000") + self.assertEqual(floatformat(1.5e-15, -20), "0.00000000000000150000") + self.assertEqual(floatformat(1.00000000000000015, 16), "1.0000000000000002") + + def test_force_grouping(self): + with translation.override("en"): + self.assertEqual(floatformat(10000, "g"), "10,000") + self.assertEqual(floatformat(66666.666, "1g"), "66,666.7") + # Invalid suffix. + self.assertEqual(floatformat(10000, "g2"), "10000") + with translation.override("de", deactivate=True): + self.assertEqual(floatformat(10000, "g"), "10.000") + self.assertEqual(floatformat(66666.666, "1g"), "66.666,7") + # Invalid suffix. + self.assertEqual(floatformat(10000, "g2"), "10000") + + def test_unlocalize(self): + with translation.override("de", deactivate=True): + self.assertEqual(floatformat(66666.666, "2"), "66666,67") + self.assertEqual(floatformat(66666.666, "2u"), "66666.67") + with self.settings( + USE_THOUSAND_SEPARATOR=True, + NUMBER_GROUPING=3, + THOUSAND_SEPARATOR="!", + ): + self.assertEqual(floatformat(66666.666, "2gu"), "66!666.67") + self.assertEqual(floatformat(66666.666, "2ug"), "66!666.67") + # Invalid suffix. 
+ self.assertEqual(floatformat(66666.666, "u2"), "66666.666") + + def test_zero_values(self): + self.assertEqual(floatformat(0, 6), "0.000000") + self.assertEqual(floatformat(0, 7), "0.0000000") + self.assertEqual(floatformat(0, 10), "0.0000000000") + self.assertEqual( + floatformat(0.000000000000000000015, 20), "0.00000000000000000002" + ) + self.assertEqual(floatformat("0.00", 0), "0") + self.assertEqual(floatformat(Decimal("0.00"), 0), "0") + self.assertEqual(floatformat("0.0000", 2), "0.00") + self.assertEqual(floatformat(Decimal("0.0000"), 2), "0.00") + self.assertEqual(floatformat("0.000000", 4), "0.0000") + self.assertEqual(floatformat(Decimal("0.000000"), 4), "0.0000") + + def test_negative_zero_values(self): + tests = [ + (-0.01, -1, "0.0"), + (-0.001, 2, "0.00"), + (-0.499, 0, "0"), + ] + for num, decimal_places, expected in tests: + with self.subTest(num=num, decimal_places=decimal_places): + self.assertEqual(floatformat(num, decimal_places), expected) + + def test_infinity(self): + pos_inf = float(1e30000) + neg_inf = float(-1e30000) + self.assertEqual(floatformat(pos_inf), "inf") + self.assertEqual(floatformat(neg_inf), "-inf") + self.assertEqual(floatformat(pos_inf / pos_inf), "nan") + + def test_float_dunder_method(self): + class FloatWrapper: + def __init__(self, value): + self.value = value + + def __float__(self): + return self.value + + self.assertEqual(floatformat(FloatWrapper(11.000001), -2), "11.00") + + def test_low_decimal_precision(self): + """ + #15789 + """ + with localcontext() as ctx: + ctx.prec = 2 + self.assertEqual(floatformat(1.2345, 2), "1.23") + self.assertEqual(floatformat(15.2042, -3), "15.204") + self.assertEqual(floatformat(1.2345, "2"), "1.23") + self.assertEqual(floatformat(15.2042, "-3"), "15.204") + self.assertEqual(floatformat(Decimal("1.2345"), 2), "1.23") + self.assertEqual(floatformat(Decimal("15.2042"), -3), "15.204") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_force_escape.py 
b/testbed/django__django/tests/template_tests/filter_tests/test_force_escape.py new file mode 100644 index 0000000000000000000000000000000000000000..02a4063adc9d1c60750ea625a110ec18687b6dc5 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_force_escape.py @@ -0,0 +1,92 @@ +from django.template.defaultfilters import force_escape +from django.test import SimpleTestCase +from django.utils.safestring import SafeData + +from ..utils import setup + + +class ForceEscapeTests(SimpleTestCase): + """ + Force_escape is applied immediately. It can be used to provide + double-escaping, for example. + """ + + @setup( + { + "force-escape01": ( + "{% autoescape off %}{{ a|force_escape }}{% endautoescape %}" + ) + } + ) + def test_force_escape01(self): + output = self.engine.render_to_string("force-escape01", {"a": "x&y"}) + self.assertEqual(output, "x&y") + + @setup({"force-escape02": "{{ a|force_escape }}"}) + def test_force_escape02(self): + output = self.engine.render_to_string("force-escape02", {"a": "x&y"}) + self.assertEqual(output, "x&y") + + @setup( + { + "force-escape03": ( + "{% autoescape off %}{{ a|force_escape|force_escape }}" + "{% endautoescape %}" + ) + } + ) + def test_force_escape03(self): + output = self.engine.render_to_string("force-escape03", {"a": "x&y"}) + self.assertEqual(output, "x&amp;y") + + @setup({"force-escape04": "{{ a|force_escape|force_escape }}"}) + def test_force_escape04(self): + output = self.engine.render_to_string("force-escape04", {"a": "x&y"}) + self.assertEqual(output, "x&amp;y") + + # Because the result of force_escape is "safe", an additional + # escape filter has no effect. 
+ @setup( + { + "force-escape05": ( + "{% autoescape off %}{{ a|force_escape|escape }}{% endautoescape %}" + ) + } + ) + def test_force_escape05(self): + output = self.engine.render_to_string("force-escape05", {"a": "x&y"}) + self.assertEqual(output, "x&y") + + @setup({"force-escape06": "{{ a|force_escape|escape }}"}) + def test_force_escape06(self): + output = self.engine.render_to_string("force-escape06", {"a": "x&y"}) + self.assertEqual(output, "x&y") + + @setup( + { + "force-escape07": ( + "{% autoescape off %}{{ a|escape|force_escape }}{% endautoescape %}" + ) + } + ) + def test_force_escape07(self): + output = self.engine.render_to_string("force-escape07", {"a": "x&y"}) + self.assertEqual(output, "x&amp;y") + + @setup({"force-escape08": "{{ a|escape|force_escape }}"}) + def test_force_escape08(self): + output = self.engine.render_to_string("force-escape08", {"a": "x&y"}) + self.assertEqual(output, "x&amp;y") + + +class FunctionTests(SimpleTestCase): + def test_escape(self): + escaped = force_escape(" here") + self.assertEqual(escaped, "<some html & special characters > here") + self.assertIsInstance(escaped, SafeData) + + def test_unicode(self): + self.assertEqual( + force_escape(" here ĐÅ€£"), + "<some html & special characters > here \u0110\xc5\u20ac\xa3", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_get_digit.py b/testbed/django__django/tests/template_tests/filter_tests/test_get_digit.py new file mode 100644 index 0000000000000000000000000000000000000000..4b10ca93edd58d929579b05360047a85fb10cbc6 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_get_digit.py @@ -0,0 +1,14 @@ +from django.template.defaultfilters import get_digit +from django.test import SimpleTestCase + + +class FunctionTests(SimpleTestCase): + def test_values(self): + self.assertEqual(get_digit(123, 1), 3) + self.assertEqual(get_digit(123, 2), 2) + self.assertEqual(get_digit(123, 3), 1) + self.assertEqual(get_digit(123, 4), 
0) + self.assertEqual(get_digit(123, 0), 123) + + def test_string(self): + self.assertEqual(get_digit("xyz", 0), "xyz") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_iriencode.py b/testbed/django__django/tests/template_tests/filter_tests/test_iriencode.py new file mode 100644 index 0000000000000000000000000000000000000000..31fce93949e9939c5b954f9eb9694a3893e33f9a --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_iriencode.py @@ -0,0 +1,49 @@ +from django.template.defaultfilters import iriencode, urlencode +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class IriencodeTests(SimpleTestCase): + """ + Ensure iriencode keeps safe strings. + """ + + @setup({"iriencode01": "{{ url|iriencode }}"}) + def test_iriencode01(self): + output = self.engine.render_to_string("iriencode01", {"url": "?test=1&me=2"}) + self.assertEqual(output, "?test=1&me=2") + + @setup( + {"iriencode02": "{% autoescape off %}{{ url|iriencode }}{% endautoescape %}"} + ) + def test_iriencode02(self): + output = self.engine.render_to_string("iriencode02", {"url": "?test=1&me=2"}) + self.assertEqual(output, "?test=1&me=2") + + @setup({"iriencode03": "{{ url|iriencode }}"}) + def test_iriencode03(self): + output = self.engine.render_to_string( + "iriencode03", {"url": mark_safe("?test=1&me=2")} + ) + self.assertEqual(output, "?test=1&me=2") + + @setup( + {"iriencode04": "{% autoescape off %}{{ url|iriencode }}{% endautoescape %}"} + ) + def test_iriencode04(self): + output = self.engine.render_to_string( + "iriencode04", {"url": mark_safe("?test=1&me=2")} + ) + self.assertEqual(output, "?test=1&me=2") + + +class FunctionTests(SimpleTestCase): + def test_unicode(self): + self.assertEqual(iriencode("S\xf8r-Tr\xf8ndelag"), "S%C3%B8r-Tr%C3%B8ndelag") + + def test_urlencoded(self): + self.assertEqual( + iriencode(urlencode("fran\xe7ois & jill")), "fran%C3%A7ois%20%26%20jill" + ) 
diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_join.py b/testbed/django__django/tests/template_tests/filter_tests/test_join.py new file mode 100644 index 0000000000000000000000000000000000000000..b92b73297210f8e1ea93ef124b71e05091d1d03f --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_join.py @@ -0,0 +1,97 @@ +from django.template.defaultfilters import join +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class JoinTests(SimpleTestCase): + @setup({"join01": '{{ a|join:", " }}'}) + def test_join01(self): + output = self.engine.render_to_string("join01", {"a": ["alpha", "beta & me"]}) + self.assertEqual(output, "alpha, beta & me") + + @setup({"join02": '{% autoescape off %}{{ a|join:", " }}{% endautoescape %}'}) + def test_join02(self): + output = self.engine.render_to_string("join02", {"a": ["alpha", "beta & me"]}) + self.assertEqual(output, "alpha, beta & me") + + @setup({"join03": '{{ a|join:" & " }}'}) + def test_join03(self): + output = self.engine.render_to_string("join03", {"a": ["alpha", "beta & me"]}) + self.assertEqual(output, "alpha & beta & me") + + @setup({"join04": '{% autoescape off %}{{ a|join:" & " }}{% endautoescape %}'}) + def test_join04(self): + output = self.engine.render_to_string("join04", {"a": ["alpha", "beta & me"]}) + self.assertEqual(output, "alpha & beta & me") + + # Joining with unsafe joiners doesn't result in unsafe strings. 
+ @setup({"join05": "{{ a|join:var }}"}) + def test_join05(self): + output = self.engine.render_to_string( + "join05", {"a": ["alpha", "beta & me"], "var": " & "} + ) + self.assertEqual(output, "alpha & beta & me") + + @setup({"join06": "{{ a|join:var }}"}) + def test_join06(self): + output = self.engine.render_to_string( + "join06", {"a": ["alpha", "beta & me"], "var": mark_safe(" & ")} + ) + self.assertEqual(output, "alpha & beta & me") + + @setup({"join07": "{{ a|join:var|lower }}"}) + def test_join07(self): + output = self.engine.render_to_string( + "join07", {"a": ["Alpha", "Beta & me"], "var": " & "} + ) + self.assertEqual(output, "alpha & beta & me") + + @setup({"join08": "{{ a|join:var|lower }}"}) + def test_join08(self): + output = self.engine.render_to_string( + "join08", {"a": ["Alpha", "Beta & me"], "var": mark_safe(" & ")} + ) + self.assertEqual(output, "alpha & beta & me") + + @setup( + { + "join_autoescape_off": ( + "{% autoescape off %}" + "{{ var_list|join:var_joiner }}" + "{% endautoescape %}" + ), + } + ) + def test_join_autoescape_off(self): + var_list = ["

    Hello World!

    ", "beta & me", ""] + context = {"var_list": var_list, "var_joiner": "
    "} + output = self.engine.render_to_string("join_autoescape_off", context) + expected_result = "

    Hello World!


    beta & me
    " + self.assertEqual(output, expected_result) + + +class FunctionTests(SimpleTestCase): + def test_list(self): + self.assertEqual(join([0, 1, 2], "glue"), "0glue1glue2") + + def test_autoescape(self): + self.assertEqual( + join(["", "", ""], "
    "), + "<a><br><img><br></a>", + ) + + def test_autoescape_off(self): + self.assertEqual( + join(["", "", ""], "
    ", autoescape=False), + "

    ", + ) + + def test_noniterable_arg(self): + obj = object() + self.assertEqual(join(obj, "
    "), obj) + + def test_noniterable_arg_autoescape_off(self): + obj = object() + self.assertEqual(join(obj, "
    ", autoescape=False), obj) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_json_script.py b/testbed/django__django/tests/template_tests/filter_tests/test_json_script.py new file mode 100644 index 0000000000000000000000000000000000000000..8b9d6556fef2e833b70206c4ab87c130813a650c --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_json_script.py @@ -0,0 +1,23 @@ +from django.test import SimpleTestCase + +from ..utils import setup + + +class JsonScriptTests(SimpleTestCase): + @setup({"json-tag01": '{{ value|json_script:"test_id" }}'}) + def test_basic(self): + output = self.engine.render_to_string( + "json-tag01", {"value": {"a": "testing\r\njson 'string\" escaping"}} + ) + self.assertEqual( + output, + '", + ) + + @setup({"json-tag02": "{{ value|json_script }}"}) + def test_without_id(self): + output = self.engine.render_to_string("json-tag02", {"value": {}}) + self.assertEqual(output, '') diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_last.py b/testbed/django__django/tests/template_tests/filter_tests/test_last.py new file mode 100644 index 0000000000000000000000000000000000000000..6b8b8fcbbcbae304e63e508f6f7a5a83d0a751df --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_last.py @@ -0,0 +1,27 @@ +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class LastTests(SimpleTestCase): + @setup({"last01": "{{ a|last }} {{ b|last }}"}) + def test_last01(self): + output = self.engine.render_to_string( + "last01", {"a": ["x", "a&b"], "b": ["x", mark_safe("a&b")]} + ) + self.assertEqual(output, "a&b a&b") + + @setup( + {"last02": "{% autoescape off %}{{ a|last }} {{ b|last }}{% endautoescape %}"} + ) + def test_last02(self): + output = self.engine.render_to_string( + "last02", {"a": ["x", "a&b"], "b": ["x", mark_safe("a&b")]} + ) + self.assertEqual(output, "a&b a&b") + + 
@setup({"empty_list": "{% autoescape off %}{{ a|last }}{% endautoescape %}"}) + def test_empty_list(self): + output = self.engine.render_to_string("empty_list", {"a": []}) + self.assertEqual(output, "") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_length.py b/testbed/django__django/tests/template_tests/filter_tests/test_length.py new file mode 100644 index 0000000000000000000000000000000000000000..507cff124b625e9eae1ca04ecc4f57929ebc70e5 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_length.py @@ -0,0 +1,58 @@ +from django.template.defaultfilters import length +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class LengthTests(SimpleTestCase): + @setup({"length01": "{{ list|length }}"}) + def test_length01(self): + output = self.engine.render_to_string( + "length01", {"list": ["4", None, True, {}]} + ) + self.assertEqual(output, "4") + + @setup({"length02": "{{ list|length }}"}) + def test_length02(self): + output = self.engine.render_to_string("length02", {"list": []}) + self.assertEqual(output, "0") + + @setup({"length03": "{{ string|length }}"}) + def test_length03(self): + output = self.engine.render_to_string("length03", {"string": ""}) + self.assertEqual(output, "0") + + @setup({"length04": "{{ string|length }}"}) + def test_length04(self): + output = self.engine.render_to_string("length04", {"string": "django"}) + self.assertEqual(output, "6") + + @setup({"length05": "{% if string|length == 6 %}Pass{% endif %}"}) + def test_length05(self): + output = self.engine.render_to_string( + "length05", {"string": mark_safe("django")} + ) + self.assertEqual(output, "Pass") + + # Invalid uses that should fail silently. 
+ @setup({"length06": "{{ int|length }}"}) + def test_length06(self): + output = self.engine.render_to_string("length06", {"int": 7}) + self.assertEqual(output, "0") + + @setup({"length07": "{{ None|length }}"}) + def test_length07(self): + output = self.engine.render_to_string("length07", {"None": None}) + self.assertEqual(output, "0") + + +class FunctionTests(SimpleTestCase): + def test_string(self): + self.assertEqual(length("1234"), 4) + + def test_safestring(self): + self.assertEqual(length(mark_safe("1234")), 4) + + def test_list(self): + self.assertEqual(length([1, 2, 3, 4]), 4) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_length_is.py b/testbed/django__django/tests/template_tests/filter_tests/test_length_is.py new file mode 100644 index 0000000000000000000000000000000000000000..5f24b2ab592c1f9b2f4fb88a6843054e3e0f8d7d --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_length_is.py @@ -0,0 +1,130 @@ +from django.template.defaultfilters import length_is +from django.test import SimpleTestCase, ignore_warnings +from django.utils.deprecation import RemovedInDjango51Warning + +from ..utils import setup + + +@ignore_warnings(category=RemovedInDjango51Warning) +class LengthIsTests(SimpleTestCase): + @setup({"length_is01": '{% if some_list|length_is:"4" %}Four{% endif %}'}) + def test_length_is01(self): + output = self.engine.render_to_string( + "length_is01", {"some_list": ["4", None, True, {}]} + ) + self.assertEqual(output, "Four") + + @setup( + { + "length_is02": ( + '{% if some_list|length_is:"4" %}Four{% else %}Not Four{% endif %}' + ) + } + ) + def test_length_is02(self): + output = self.engine.render_to_string( + "length_is02", {"some_list": ["4", None, True, {}, 17]} + ) + self.assertEqual(output, "Not Four") + + @setup({"length_is03": '{% if mystring|length_is:"4" %}Four{% endif %}'}) + def test_length_is03(self): + output = self.engine.render_to_string("length_is03", {"mystring": "word"}) + 
self.assertEqual(output, "Four") + + @setup( + { + "length_is04": ( + '{% if mystring|length_is:"4" %}Four{% else %}Not Four{% endif %}' + ) + } + ) + def test_length_is04(self): + output = self.engine.render_to_string("length_is04", {"mystring": "Python"}) + self.assertEqual(output, "Not Four") + + @setup( + { + "length_is05": ( + '{% if mystring|length_is:"4" %}Four{% else %}Not Four{% endif %}' + ) + } + ) + def test_length_is05(self): + output = self.engine.render_to_string("length_is05", {"mystring": ""}) + self.assertEqual(output, "Not Four") + + @setup( + { + "length_is06": ( + "{% with var|length as my_length %}{{ my_length }}{% endwith %}" + ) + } + ) + def test_length_is06(self): + output = self.engine.render_to_string("length_is06", {"var": "django"}) + self.assertEqual(output, "6") + + # Boolean return value from length_is should not be coerced to a string + @setup( + { + "length_is07": ( + '{% if "X"|length_is:0 %}Length is 0{% else %}Length not 0{% endif %}' + ) + } + ) + def test_length_is07(self): + output = self.engine.render_to_string("length_is07", {}) + self.assertEqual(output, "Length not 0") + + @setup( + { + "length_is08": ( + '{% if "X"|length_is:1 %}Length is 1{% else %}Length not 1{% endif %}' + ) + } + ) + def test_length_is08(self): + output = self.engine.render_to_string("length_is08", {}) + self.assertEqual(output, "Length is 1") + + # Invalid uses that should fail silently. 
+ @setup({"length_is09": '{{ var|length_is:"fish" }}'}) + def test_length_is09(self): + output = self.engine.render_to_string("length_is09", {"var": "django"}) + self.assertEqual(output, "") + + @setup({"length_is10": '{{ int|length_is:"1" }}'}) + def test_length_is10(self): + output = self.engine.render_to_string("length_is10", {"int": 7}) + self.assertEqual(output, "") + + @setup({"length_is11": '{{ none|length_is:"1" }}'}) + def test_length_is11(self): + output = self.engine.render_to_string("length_is11", {"none": None}) + self.assertEqual(output, "") + + +@ignore_warnings(category=RemovedInDjango51Warning) +class FunctionTests(SimpleTestCase): + def test_empty_list(self): + self.assertIs(length_is([], 0), True) + self.assertIs(length_is([], 1), False) + + def test_string(self): + self.assertIs(length_is("a", 1), True) + self.assertIs(length_is("a", 10), False) + + +class DeprecationTests(SimpleTestCase): + @setup( + {"length_is_warning": "{{ string|length_is:3 }}"}, + test_once=True, + ) + def test_length_is_warning(self): + msg = ( + "The length_is template filter is deprecated in favor of the length " + "template filter and the == operator within an {% if %} tag." 
+ ) + with self.assertRaisesMessage(RemovedInDjango51Warning, msg): + self.engine.render_to_string("length_is_warning", {"string": "good"}) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_linebreaks.py b/testbed/django__django/tests/template_tests/filter_tests/test_linebreaks.py new file mode 100644 index 0000000000000000000000000000000000000000..10c8a92d6a6ae4df78f705f7ff0bafc9e5e72c10 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_linebreaks.py @@ -0,0 +1,72 @@ +from django.template.defaultfilters import linebreaks_filter +from django.test import SimpleTestCase +from django.utils.functional import lazy +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class LinebreaksTests(SimpleTestCase): + """ + The contents in "linebreaks" are escaped according to the current + autoescape setting. + """ + + @setup({"linebreaks01": "{{ a|linebreaks }} {{ b|linebreaks }}"}) + def test_linebreaks01(self): + output = self.engine.render_to_string( + "linebreaks01", {"a": "x&\ny", "b": mark_safe("x&\ny")} + ) + self.assertEqual(output, "

    x&
    y

    x&
    y

    ") + + @setup( + { + "linebreaks02": ( + "{% autoescape off %}{{ a|linebreaks }} {{ b|linebreaks }}" + "{% endautoescape %}" + ) + } + ) + def test_linebreaks02(self): + output = self.engine.render_to_string( + "linebreaks02", {"a": "x&\ny", "b": mark_safe("x&\ny")} + ) + self.assertEqual(output, "

    x&
    y

    x&
    y

    ") + + +class FunctionTests(SimpleTestCase): + def test_line(self): + self.assertEqual(linebreaks_filter("line 1"), "

    line 1

    ") + + def test_newline(self): + self.assertEqual(linebreaks_filter("line 1\nline 2"), "

    line 1
    line 2

    ") + + def test_carriage(self): + self.assertEqual(linebreaks_filter("line 1\rline 2"), "

    line 1
    line 2

    ") + + def test_carriage_newline(self): + self.assertEqual( + linebreaks_filter("line 1\r\nline 2"), "

    line 1
    line 2

    " + ) + + def test_non_string_input(self): + self.assertEqual(linebreaks_filter(123), "

    123

    ") + + def test_autoescape(self): + self.assertEqual( + linebreaks_filter("foo\nbar\nbuz"), + "

    foo
    <a>bar</a>
    buz

    ", + ) + + def test_autoescape_off(self): + self.assertEqual( + linebreaks_filter("foo\nbar\nbuz", autoescape=False), + "

    foo
    bar
    buz

    ", + ) + + def test_lazy_string_input(self): + add_header = lazy(lambda string: "Header\n\n" + string, str) + self.assertEqual( + linebreaks_filter(add_header("line 1\r\nline2")), + "

    Header

    \n\n

    line 1
    line2

    ", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_linebreaksbr.py b/testbed/django__django/tests/template_tests/filter_tests/test_linebreaksbr.py new file mode 100644 index 0000000000000000000000000000000000000000..aa6505a47afc8ca9428f3af1c66299c91eeb67da --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_linebreaksbr.py @@ -0,0 +1,59 @@ +from django.template.defaultfilters import linebreaksbr +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class LinebreaksbrTests(SimpleTestCase): + """ + The contents in "linebreaksbr" are escaped according to the current + autoescape setting. + """ + + @setup({"linebreaksbr01": "{{ a|linebreaksbr }} {{ b|linebreaksbr }}"}) + def test_linebreaksbr01(self): + output = self.engine.render_to_string( + "linebreaksbr01", {"a": "x&\ny", "b": mark_safe("x&\ny")} + ) + self.assertEqual(output, "x&
    y x&
    y") + + @setup( + { + "linebreaksbr02": ( + "{% autoescape off %}{{ a|linebreaksbr }} {{ b|linebreaksbr }}" + "{% endautoescape %}" + ) + } + ) + def test_linebreaksbr02(self): + output = self.engine.render_to_string( + "linebreaksbr02", {"a": "x&\ny", "b": mark_safe("x&\ny")} + ) + self.assertEqual(output, "x&
    y x&
    y") + + +class FunctionTests(SimpleTestCase): + def test_newline(self): + self.assertEqual(linebreaksbr("line 1\nline 2"), "line 1
    line 2") + + def test_carriage(self): + self.assertEqual(linebreaksbr("line 1\rline 2"), "line 1
    line 2") + + def test_carriage_newline(self): + self.assertEqual(linebreaksbr("line 1\r\nline 2"), "line 1
    line 2") + + def test_non_string_input(self): + self.assertEqual(linebreaksbr(123), "123") + + def test_autoescape(self): + self.assertEqual( + linebreaksbr("foo\nbar\nbuz"), + "foo
    <a>bar</a>
    buz", + ) + + def test_autoescape_off(self): + self.assertEqual( + linebreaksbr("foo\nbar\nbuz", autoescape=False), + "foo
    bar
    buz", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_linenumbers.py b/testbed/django__django/tests/template_tests/filter_tests/test_linenumbers.py new file mode 100644 index 0000000000000000000000000000000000000000..8b1f8ef8af7f389ce1144b4039a9e989689a7768 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_linenumbers.py @@ -0,0 +1,65 @@ +from django.template.defaultfilters import linenumbers +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class LinenumbersTests(SimpleTestCase): + """ + The contents of "linenumbers" is escaped according to the current + autoescape setting. + """ + + @setup({"linenumbers01": "{{ a|linenumbers }} {{ b|linenumbers }}"}) + def test_linenumbers01(self): + output = self.engine.render_to_string( + "linenumbers01", + {"a": "one\n\nthree", "b": mark_safe("one\n<two>\nthree")}, + ) + self.assertEqual( + output, "1. one\n2. <two>\n3. three 1. one\n2. <two>\n3. three" + ) + + @setup( + { + "linenumbers02": ( + "{% autoescape off %}{{ a|linenumbers }} {{ b|linenumbers }}" + "{% endautoescape %}" + ) + } + ) + def test_linenumbers02(self): + output = self.engine.render_to_string( + "linenumbers02", + {"a": "one\n\nthree", "b": mark_safe("one\n<two>\nthree")}, + ) + self.assertEqual( + output, "1. one\n2. \n3. three 1. one\n2. <two>\n3. three" + ) + + +class FunctionTests(SimpleTestCase): + def test_linenumbers(self): + self.assertEqual(linenumbers("line 1\nline 2"), "1. line 1\n2. line 2") + + def test_linenumbers2(self): + self.assertEqual( + linenumbers("\n".join(["x"] * 10)), + "01. x\n02. x\n03. x\n04. x\n05. x\n06. x\n07. x\n08. x\n09. x\n10. x", + ) + + def test_non_string_input(self): + self.assertEqual(linenumbers(123), "1. 123") + + def test_autoescape(self): + self.assertEqual( + linenumbers("foo\nbar\nbuz"), + "1. foo\n2. <a>bar</a>\n3. 
buz", + ) + + def test_autoescape_off(self): + self.assertEqual( + linenumbers("foo\nbar\nbuz", autoescape=False), + "1. foo\n2. bar\n3. buz", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_ljust.py b/testbed/django__django/tests/template_tests/filter_tests/test_ljust.py new file mode 100644 index 0000000000000000000000000000000000000000..f07a3c88f755973dfa523742ff236a41d9b0582a --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_ljust.py @@ -0,0 +1,40 @@ +from django.template.defaultfilters import ljust +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class LjustTests(SimpleTestCase): + @setup( + { + "ljust01": ( + '{% autoescape off %}.{{ a|ljust:"5" }}. .{{ b|ljust:"5" }}.' + "{% endautoescape %}" + ) + } + ) + def test_ljust01(self): + output = self.engine.render_to_string( + "ljust01", {"a": "a&b", "b": mark_safe("a&b")} + ) + self.assertEqual(output, ".a&b . .a&b .") + + @setup({"ljust02": '.{{ a|ljust:"5" }}. .{{ b|ljust:"5" }}.'}) + def test_ljust02(self): + output = self.engine.render_to_string( + "ljust02", {"a": "a&b", "b": mark_safe("a&b")} + ) + self.assertEqual(output, ".a&b . 
.a&b .") + + +class FunctionTests(SimpleTestCase): + def test_ljust(self): + self.assertEqual(ljust("test", 10), "test ") + self.assertEqual(ljust("test", 3), "test") + + def test_less_than_string_length(self): + self.assertEqual(ljust("test", 3), "test") + + def test_non_string_input(self): + self.assertEqual(ljust(123, 4), "123 ") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_lower.py b/testbed/django__django/tests/template_tests/filter_tests/test_lower.py new file mode 100644 index 0000000000000000000000000000000000000000..ae30094d69ac3d5dac1b44b806a0315332faf5c2 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_lower.py @@ -0,0 +1,39 @@ +from django.template.defaultfilters import lower +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class LowerTests(SimpleTestCase): + @setup( + { + "lower01": ( + "{% autoescape off %}{{ a|lower }} {{ b|lower }}{% endautoescape %}" + ) + } + ) + def test_lower01(self): + output = self.engine.render_to_string( + "lower01", {"a": "Apple & banana", "b": mark_safe("Apple & banana")} + ) + self.assertEqual(output, "apple & banana apple & banana") + + @setup({"lower02": "{{ a|lower }} {{ b|lower }}"}) + def test_lower02(self): + output = self.engine.render_to_string( + "lower02", {"a": "Apple & banana", "b": mark_safe("Apple & banana")} + ) + self.assertEqual(output, "apple & banana apple & banana") + + +class FunctionTests(SimpleTestCase): + def test_lower(self): + self.assertEqual(lower("TEST"), "test") + + def test_unicode(self): + # uppercase E umlaut + self.assertEqual(lower("\xcb"), "\xeb") + + def test_non_string_input(self): + self.assertEqual(lower(123), "123") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_make_list.py b/testbed/django__django/tests/template_tests/filter_tests/test_make_list.py new file mode 100644 index 
0000000000000000000000000000000000000000..ddf8dbc7e75628ecf4ae6fb1dd7d069c02baf28c --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_make_list.py @@ -0,0 +1,47 @@ +from django.template.defaultfilters import make_list +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class MakeListTests(SimpleTestCase): + """ + The make_list filter can destroy existing escaping, so the results are + escaped. + """ + + @setup({"make_list01": "{% autoescape off %}{{ a|make_list }}{% endautoescape %}"}) + def test_make_list01(self): + output = self.engine.render_to_string("make_list01", {"a": mark_safe("&")}) + self.assertEqual(output, "['&']") + + @setup({"make_list02": "{{ a|make_list }}"}) + def test_make_list02(self): + output = self.engine.render_to_string("make_list02", {"a": mark_safe("&")}) + self.assertEqual(output, "['&']") + + @setup( + { + "make_list03": ( + '{% autoescape off %}{{ a|make_list|stringformat:"s"|safe }}' + "{% endautoescape %}" + ) + } + ) + def test_make_list03(self): + output = self.engine.render_to_string("make_list03", {"a": mark_safe("&")}) + self.assertEqual(output, "['&']") + + @setup({"make_list04": '{{ a|make_list|stringformat:"s"|safe }}'}) + def test_make_list04(self): + output = self.engine.render_to_string("make_list04", {"a": mark_safe("&")}) + self.assertEqual(output, "['&']") + + +class FunctionTests(SimpleTestCase): + def test_string(self): + self.assertEqual(make_list("abc"), ["a", "b", "c"]) + + def test_integer(self): + self.assertEqual(make_list(1234), ["1", "2", "3", "4"]) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_phone2numeric.py b/testbed/django__django/tests/template_tests/filter_tests/test_phone2numeric.py new file mode 100644 index 0000000000000000000000000000000000000000..ee3805b70dd69430fea59e120c49148ba892e6e0 --- /dev/null +++ 
b/testbed/django__django/tests/template_tests/filter_tests/test_phone2numeric.py @@ -0,0 +1,45 @@ +from django.template.defaultfilters import phone2numeric_filter +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class Phone2numericTests(SimpleTestCase): + @setup({"phone2numeric01": "{{ a|phone2numeric }} {{ b|phone2numeric }}"}) + def test_phone2numeric01(self): + output = self.engine.render_to_string( + "phone2numeric01", + {"a": "<1-800-call-me>", "b": mark_safe("<1-800-call-me>")}, + ) + self.assertEqual(output, "<1-800-2255-63> <1-800-2255-63>") + + @setup( + { + "phone2numeric02": ( + "{% autoescape off %}{{ a|phone2numeric }} {{ b|phone2numeric }}" + "{% endautoescape %}" + ) + } + ) + def test_phone2numeric02(self): + output = self.engine.render_to_string( + "phone2numeric02", + {"a": "<1-800-call-me>", "b": mark_safe("<1-800-call-me>")}, + ) + self.assertEqual(output, "<1-800-2255-63> <1-800-2255-63>") + + @setup({"phone2numeric03": "{{ a|phone2numeric }}"}) + def test_phone2numeric03(self): + output = self.engine.render_to_string( + "phone2numeric03", + {"a": "How razorback-jumping frogs can level six piqued gymnasts!"}, + ) + self.assertEqual( + output, "469 729672225-5867464 37647 226 53835 749 747833 49662787!" 
+ ) + + +class FunctionTests(SimpleTestCase): + def test_phone2numeric(self): + self.assertEqual(phone2numeric_filter("0800 flowers"), "0800 3569377") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_pluralize.py b/testbed/django__django/tests/template_tests/filter_tests/test_pluralize.py new file mode 100644 index 0000000000000000000000000000000000000000..c28718e1014e18746a8f9c11fe1a1dca0fe762aa --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_pluralize.py @@ -0,0 +1,64 @@ +from decimal import Decimal + +from django.template.defaultfilters import pluralize +from django.test import SimpleTestCase + +from ..utils import setup + + +class PluralizeTests(SimpleTestCase): + def check_values(self, *tests): + for value, expected in tests: + with self.subTest(value=value): + output = self.engine.render_to_string("t", {"value": value}) + self.assertEqual(output, expected) + + @setup({"t": "vote{{ value|pluralize }}"}) + def test_no_arguments(self): + self.check_values(("0", "votes"), ("1", "vote"), ("2", "votes")) + + @setup({"t": 'class{{ value|pluralize:"es" }}'}) + def test_suffix(self): + self.check_values(("0", "classes"), ("1", "class"), ("2", "classes")) + + @setup({"t": 'cand{{ value|pluralize:"y,ies" }}'}) + def test_singular_and_plural_suffix(self): + self.check_values(("0", "candies"), ("1", "candy"), ("2", "candies")) + + +class FunctionTests(SimpleTestCase): + def test_integers(self): + self.assertEqual(pluralize(1), "") + self.assertEqual(pluralize(0), "s") + self.assertEqual(pluralize(2), "s") + + def test_floats(self): + self.assertEqual(pluralize(0.5), "s") + self.assertEqual(pluralize(1.5), "s") + + def test_decimals(self): + self.assertEqual(pluralize(Decimal(1)), "") + self.assertEqual(pluralize(Decimal(0)), "s") + self.assertEqual(pluralize(Decimal(2)), "s") + + def test_lists(self): + self.assertEqual(pluralize([1]), "") + self.assertEqual(pluralize([]), "s") + self.assertEqual(pluralize([1, 2, 
3]), "s") + + def test_suffixes(self): + self.assertEqual(pluralize(1, "es"), "") + self.assertEqual(pluralize(0, "es"), "es") + self.assertEqual(pluralize(2, "es"), "es") + self.assertEqual(pluralize(1, "y,ies"), "y") + self.assertEqual(pluralize(0, "y,ies"), "ies") + self.assertEqual(pluralize(2, "y,ies"), "ies") + self.assertEqual(pluralize(0, "y,ies,error"), "") + + def test_no_len_type(self): + self.assertEqual(pluralize(object(), "y,es"), "") + self.assertEqual(pluralize(object(), "es"), "") + + def test_value_error(self): + self.assertEqual(pluralize("", "y,es"), "") + self.assertEqual(pluralize("", "es"), "") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_random.py b/testbed/django__django/tests/template_tests/filter_tests/test_random.py new file mode 100644 index 0000000000000000000000000000000000000000..8f197e6f13cf4e25c903e4184aa58ef9d25cc6db --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_random.py @@ -0,0 +1,31 @@ +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class RandomTests(SimpleTestCase): + @setup({"random01": "{{ a|random }} {{ b|random }}"}) + def test_random01(self): + output = self.engine.render_to_string( + "random01", {"a": ["a&b", "a&b"], "b": [mark_safe("a&b"), mark_safe("a&b")]} + ) + self.assertEqual(output, "a&b a&b") + + @setup( + { + "random02": ( + "{% autoescape off %}{{ a|random }} {{ b|random }}{% endautoescape %}" + ) + } + ) + def test_random02(self): + output = self.engine.render_to_string( + "random02", {"a": ["a&b", "a&b"], "b": [mark_safe("a&b"), mark_safe("a&b")]} + ) + self.assertEqual(output, "a&b a&b") + + @setup({"empty_list": "{{ list|random }}"}) + def test_empty_list(self): + output = self.engine.render_to_string("empty_list", {"list": []}) + self.assertEqual(output, "") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_rjust.py 
b/testbed/django__django/tests/template_tests/filter_tests/test_rjust.py new file mode 100644 index 0000000000000000000000000000000000000000..2d12dfccf4bbe2b0142a730e88a98f3eade5364f --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_rjust.py @@ -0,0 +1,39 @@ +from django.template.defaultfilters import rjust +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class RjustTests(SimpleTestCase): + @setup( + { + "rjust01": ( + '{% autoescape off %}.{{ a|rjust:"5" }}. .{{ b|rjust:"5" }}.' + "{% endautoescape %}" + ) + } + ) + def test_rjust01(self): + output = self.engine.render_to_string( + "rjust01", {"a": "a&b", "b": mark_safe("a&b")} + ) + self.assertEqual(output, ". a&b. . a&b.") + + @setup({"rjust02": '.{{ a|rjust:"5" }}. .{{ b|rjust:"5" }}.'}) + def test_rjust02(self): + output = self.engine.render_to_string( + "rjust02", {"a": "a&b", "b": mark_safe("a&b")} + ) + self.assertEqual(output, ". a&b. . 
a&b.") + + +class FunctionTests(SimpleTestCase): + def test_rjust(self): + self.assertEqual(rjust("test", 10), " test") + + def test_less_than_string_length(self): + self.assertEqual(rjust("test", 3), "test") + + def test_non_string_input(self): + self.assertEqual(rjust(123, 4), " 123") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_safe.py b/testbed/django__django/tests/template_tests/filter_tests/test_safe.py new file mode 100644 index 0000000000000000000000000000000000000000..ecde1c820c942c420080e11c4e9263e00b9b7549 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_safe.py @@ -0,0 +1,15 @@ +from django.test import SimpleTestCase + +from ..utils import setup + + +class SafeTests(SimpleTestCase): + @setup({"safe01": "{{ a }} -- {{ a|safe }}"}) + def test_safe01(self): + output = self.engine.render_to_string("safe01", {"a": "hello"}) + self.assertEqual(output, "<b>hello</b> -- hello") + + @setup({"safe02": "{% autoescape off %}{{ a }} -- {{ a|safe }}{% endautoescape %}"}) + def test_safe02(self): + output = self.engine.render_to_string("safe02", {"a": "hello"}) + self.assertEqual(output, "hello -- hello") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_safeseq.py b/testbed/django__django/tests/template_tests/filter_tests/test_safeseq.py new file mode 100644 index 0000000000000000000000000000000000000000..e9b3feb0b3389bcb98719e781556c6ebb13426dc --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_safeseq.py @@ -0,0 +1,22 @@ +from django.test import SimpleTestCase + +from ..utils import setup + + +class SafeseqTests(SimpleTestCase): + @setup({"safeseq01": '{{ a|join:", " }} -- {{ a|safeseq|join:", " }}'}) + def test_safeseq01(self): + output = self.engine.render_to_string("safeseq01", {"a": ["&", "<"]}) + self.assertEqual(output, "&, < -- &, <") + + @setup( + { + "safeseq02": ( + '{% autoescape off %}{{ a|join:", " }} -- {{ a|safeseq|join:", " }}' + 
"{% endautoescape %}" + ) + } + ) + def test_safeseq02(self): + output = self.engine.render_to_string("safeseq02", {"a": ["&", "<"]}) + self.assertEqual(output, "&, < -- &, <") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_slice.py b/testbed/django__django/tests/template_tests/filter_tests/test_slice.py new file mode 100644 index 0000000000000000000000000000000000000000..5a5dd6b1550bc2402f5c3f53d9ca0211828aa8c6 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_slice.py @@ -0,0 +1,55 @@ +from django.template.defaultfilters import slice_filter +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class SliceTests(SimpleTestCase): + @setup({"slice01": '{{ a|slice:"1:3" }} {{ b|slice:"1:3" }}'}) + def test_slice01(self): + output = self.engine.render_to_string( + "slice01", {"a": "a&b", "b": mark_safe("a&b")} + ) + self.assertEqual(output, "&b &b") + + @setup( + { + "slice02": ( + '{% autoescape off %}{{ a|slice:"1:3" }} {{ b|slice:"1:3" }}' + "{% endautoescape %}" + ) + } + ) + def test_slice02(self): + output = self.engine.render_to_string( + "slice02", {"a": "a&b", "b": mark_safe("a&b")} + ) + self.assertEqual(output, "&b &b") + + +class FunctionTests(SimpleTestCase): + def test_zero_length(self): + self.assertEqual(slice_filter("abcdefg", "0"), "") + + def test_index(self): + self.assertEqual(slice_filter("abcdefg", "1"), "a") + + def test_index_integer(self): + self.assertEqual(slice_filter("abcdefg", 1), "a") + + def test_negative_index(self): + self.assertEqual(slice_filter("abcdefg", "-1"), "abcdef") + + def test_range(self): + self.assertEqual(slice_filter("abcdefg", "1:2"), "b") + + def test_range_multiple(self): + self.assertEqual(slice_filter("abcdefg", "1:3"), "bc") + + def test_range_step(self): + self.assertEqual(slice_filter("abcdefg", "0::2"), "aceg") + + def test_fail_silently(self): + obj = object() + 
self.assertEqual(slice_filter(obj, "0::2"), obj) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_slugify.py b/testbed/django__django/tests/template_tests/filter_tests/test_slugify.py new file mode 100644 index 0000000000000000000000000000000000000000..7bb18c5c89023b64c4f3ee999157536f60f7a6c8 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_slugify.py @@ -0,0 +1,63 @@ +from django.template.defaultfilters import slugify +from django.test import SimpleTestCase +from django.utils.functional import lazy +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class SlugifyTests(SimpleTestCase): + """ + Running slugify on a pre-escaped string leads to odd behavior, + but the result is still safe. + """ + + @setup( + { + "slugify01": ( + "{% autoescape off %}{{ a|slugify }} {{ b|slugify }}{% endautoescape %}" + ) + } + ) + def test_slugify01(self): + output = self.engine.render_to_string( + "slugify01", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "a-b a-amp-b") + + @setup({"slugify02": "{{ a|slugify }} {{ b|slugify }}"}) + def test_slugify02(self): + output = self.engine.render_to_string( + "slugify02", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "a-b a-amp-b") + + +class FunctionTests(SimpleTestCase): + def test_slugify(self): + self.assertEqual( + slugify( + " Jack & Jill like numbers 1,2,3 and 4 and silly characters ?%.$!/" + ), + "jack-jill-like-numbers-123-and-4-and-silly-characters", + ) + + def test_unicode(self): + self.assertEqual( + slugify("Un \xe9l\xe9phant \xe0 l'or\xe9e du bois"), + "un-elephant-a-loree-du-bois", + ) + + def test_non_string_input(self): + self.assertEqual(slugify(123), "123") + + def test_slugify_lazy_string(self): + lazy_str = lazy(lambda string: string, str) + self.assertEqual( + slugify( + lazy_str( + " Jack & Jill like numbers 1,2,3 and 4 and silly characters ?%.$!/" + ) + ), + 
"jack-jill-like-numbers-123-and-4-and-silly-characters", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_stringformat.py b/testbed/django__django/tests/template_tests/filter_tests/test_stringformat.py new file mode 100644 index 0000000000000000000000000000000000000000..075906aecb22d662114cefb4a4072bcbec2d44ca --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_stringformat.py @@ -0,0 +1,51 @@ +from django.template.defaultfilters import stringformat +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class StringformatTests(SimpleTestCase): + """ + Notice that escaping is applied *after* any filters, so the string + formatting here only needs to deal with pre-escaped characters. + """ + + @setup( + { + "stringformat01": ( + '{% autoescape off %}.{{ a|stringformat:"5s" }}. .' + '{{ b|stringformat:"5s" }}.{% endautoescape %}' + ) + } + ) + def test_stringformat01(self): + output = self.engine.render_to_string( + "stringformat01", {"a": "ax

    y

    ", + "b": mark_safe("x

    y

    "), + }, + ) + self.assertEqual(output, "x y x y") + + @setup( + { + "striptags02": ( + "{% autoescape off %}{{ a|striptags }} {{ b|striptags }}" + "{% endautoescape %}" + ) + } + ) + def test_striptags02(self): + output = self.engine.render_to_string( + "striptags02", + { + "a": "x

    y

    ", + "b": mark_safe("x

    y

    "), + }, + ) + self.assertEqual(output, "x y x y") + + +class FunctionTests(SimpleTestCase): + def test_strip(self): + self.assertEqual( + striptags( + 'some html with disallowed ' + " tags" + ), + 'some html with alert("You smell") disallowed tags', + ) + + def test_non_string_input(self): + self.assertEqual(striptags(123), "123") + + def test_strip_lazy_string(self): + self.assertEqual( + striptags( + lazystr( + 'some html with disallowed ' + " tags" + ) + ), + 'some html with alert("Hello") disallowed tags', + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_time.py b/testbed/django__django/tests/template_tests/filter_tests/test_time.py new file mode 100644 index 0000000000000000000000000000000000000000..c8ab58b5531be834db3c858c51e63a0b4d31e365 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_time.py @@ -0,0 +1,67 @@ +from datetime import time + +from django.template.defaultfilters import time as time_filter +from django.test import SimpleTestCase +from django.utils import timezone, translation + +from ..utils import setup +from .timezone_utils import TimezoneTestCase + + +class TimeTests(TimezoneTestCase): + """ + #20693: Timezone support for the time template filter + """ + + @setup({"time00": "{{ dt|time }}"}) + def test_time00(self): + output = self.engine.render_to_string("time00", {"dt": time(16, 25)}) + self.assertEqual(output, "4:25 p.m.") + + @setup({"time00_l10n": "{{ dt|time }}"}) + def test_time00_l10n(self): + with translation.override("fr"): + output = self.engine.render_to_string("time00_l10n", {"dt": time(16, 25)}) + self.assertEqual(output, "16:25") + + @setup({"time01": '{{ dt|time:"e:O:T:Z" }}'}) + def test_time01(self): + output = self.engine.render_to_string("time01", {"dt": self.now_tz_i}) + self.assertEqual(output, "+0315:+0315:+0315:11700") + + @setup({"time02": '{{ dt|time:"e:T" }}'}) + def test_time02(self): + output = self.engine.render_to_string("time02", {"dt": 
self.now}) + self.assertEqual(output, ":" + self.now_tz.tzinfo.tzname(self.now_tz)) + + @setup({"time03": '{{ t|time:"P:e:O:T:Z" }}'}) + def test_time03(self): + output = self.engine.render_to_string( + "time03", {"t": time(4, 0, tzinfo=timezone.get_fixed_timezone(30))} + ) + self.assertEqual(output, "4 a.m.::::") + + @setup({"time04": '{{ t|time:"P:e:O:T:Z" }}'}) + def test_time04(self): + output = self.engine.render_to_string("time04", {"t": time(4, 0)}) + self.assertEqual(output, "4 a.m.::::") + + @setup({"time05": '{{ d|time:"P:e:O:T:Z" }}'}) + def test_time05(self): + output = self.engine.render_to_string("time05", {"d": self.today}) + self.assertEqual(output, "") + + @setup({"time06": '{{ obj|time:"P:e:O:T:Z" }}'}) + def test_time06(self): + output = self.engine.render_to_string("time06", {"obj": "non-datetime-value"}) + self.assertEqual(output, "") + + +class FunctionTests(SimpleTestCase): + def test_no_args(self): + self.assertEqual(time_filter(""), "") + self.assertEqual(time_filter(None), "") + + def test_inputs(self): + self.assertEqual(time_filter(time(13), "h"), "01") + self.assertEqual(time_filter(time(0), "h"), "12") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_timesince.py b/testbed/django__django/tests/template_tests/filter_tests/test_timesince.py new file mode 100644 index 0000000000000000000000000000000000000000..d623449e00c943d577890724c0984489b151f7cb --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_timesince.py @@ -0,0 +1,178 @@ +from datetime import datetime, timedelta + +from django.template.defaultfilters import timesince_filter +from django.test import SimpleTestCase +from django.test.utils import requires_tz_support + +from ..utils import setup +from .timezone_utils import TimezoneTestCase + + +class TimesinceTests(TimezoneTestCase): + """ + #20246 - \xa0 in output avoids line-breaks between value and unit + """ + + # Default compare with datetime.now() + 
@setup({"timesince01": "{{ a|timesince }}"}) + def test_timesince01(self): + output = self.engine.render_to_string( + "timesince01", {"a": datetime.now() + timedelta(minutes=-1, seconds=-10)} + ) + self.assertEqual(output, "1\xa0minute") + + @setup({"timesince02": "{{ a|timesince }}"}) + def test_timesince02(self): + output = self.engine.render_to_string( + "timesince02", {"a": datetime.now() - timedelta(days=1, minutes=1)} + ) + self.assertEqual(output, "1\xa0day") + + @setup({"timesince03": "{{ a|timesince }}"}) + def test_timesince03(self): + output = self.engine.render_to_string( + "timesince03", + {"a": datetime.now() - timedelta(hours=1, minutes=25, seconds=10)}, + ) + self.assertEqual(output, "1\xa0hour, 25\xa0minutes") + + # Compare to a given parameter + @setup({"timesince04": "{{ a|timesince:b }}"}) + def test_timesince04(self): + output = self.engine.render_to_string( + "timesince04", + {"a": self.now - timedelta(days=2), "b": self.now - timedelta(days=1)}, + ) + self.assertEqual(output, "1\xa0day") + + @setup({"timesince05": "{{ a|timesince:b }}"}) + def test_timesince05(self): + output = self.engine.render_to_string( + "timesince05", + { + "a": self.now - timedelta(days=2, minutes=1), + "b": self.now - timedelta(days=2), + }, + ) + self.assertEqual(output, "1\xa0minute") + + # Timezone is respected + @setup({"timesince06": "{{ a|timesince:b }}"}) + def test_timesince06(self): + output = self.engine.render_to_string( + "timesince06", {"a": self.now_tz - timedelta(hours=8), "b": self.now_tz} + ) + self.assertEqual(output, "8\xa0hours") + + # Tests for #7443 + @setup({"timesince07": "{{ earlier|timesince }}"}) + def test_timesince07(self): + output = self.engine.render_to_string( + "timesince07", {"earlier": self.now - timedelta(days=7)} + ) + self.assertEqual(output, "1\xa0week") + + @setup({"timesince08": "{{ earlier|timesince:now }}"}) + def test_timesince08(self): + output = self.engine.render_to_string( + "timesince08", {"now": self.now, "earlier": 
self.now - timedelta(days=7)} + ) + self.assertEqual(output, "1\xa0week") + + @setup({"timesince09": "{{ later|timesince }}"}) + def test_timesince09(self): + output = self.engine.render_to_string( + "timesince09", {"later": self.now + timedelta(days=7)} + ) + self.assertEqual(output, "0\xa0minutes") + + @setup({"timesince10": "{{ later|timesince:now }}"}) + def test_timesince10(self): + output = self.engine.render_to_string( + "timesince10", {"now": self.now, "later": self.now + timedelta(days=7)} + ) + self.assertEqual(output, "0\xa0minutes") + + # Differing timezones are calculated correctly. + @setup({"timesince11": "{{ a|timesince }}"}) + def test_timesince11(self): + output = self.engine.render_to_string("timesince11", {"a": self.now}) + self.assertEqual(output, "0\xa0minutes") + + @requires_tz_support + @setup({"timesince12": "{{ a|timesince }}"}) + def test_timesince12(self): + output = self.engine.render_to_string("timesince12", {"a": self.now_tz}) + self.assertEqual(output, "0\xa0minutes") + + @requires_tz_support + @setup({"timesince13": "{{ a|timesince }}"}) + def test_timesince13(self): + output = self.engine.render_to_string("timesince13", {"a": self.now_tz_i}) + self.assertEqual(output, "0\xa0minutes") + + @setup({"timesince14": "{{ a|timesince:b }}"}) + def test_timesince14(self): + output = self.engine.render_to_string( + "timesince14", {"a": self.now_tz, "b": self.now_tz_i} + ) + self.assertEqual(output, "0\xa0minutes") + + @setup({"timesince15": "{{ a|timesince:b }}"}) + def test_timesince15(self): + output = self.engine.render_to_string( + "timesince15", {"a": self.now, "b": self.now_tz_i} + ) + self.assertEqual(output, "") + + @setup({"timesince16": "{{ a|timesince:b }}"}) + def test_timesince16(self): + output = self.engine.render_to_string( + "timesince16", {"a": self.now_tz_i, "b": self.now} + ) + self.assertEqual(output, "") + + # Tests for #9065 (two date objects). 
+ @setup({"timesince17": "{{ a|timesince:b }}"}) + def test_timesince17(self): + output = self.engine.render_to_string( + "timesince17", {"a": self.today, "b": self.today} + ) + self.assertEqual(output, "0\xa0minutes") + + @setup({"timesince18": "{{ a|timesince:b }}"}) + def test_timesince18(self): + output = self.engine.render_to_string( + "timesince18", {"a": self.today, "b": self.today + timedelta(hours=24)} + ) + self.assertEqual(output, "1\xa0day") + + # Tests for #33879 (wrong results for 11 months + several weeks). + @setup({"timesince19": "{{ earlier|timesince }}"}) + def test_timesince19(self): + output = self.engine.render_to_string( + "timesince19", {"earlier": self.today - timedelta(days=358)} + ) + self.assertEqual(output, "11\xa0months, 3\xa0weeks") + + @setup({"timesince20": "{{ a|timesince:b }}"}) + def test_timesince20(self): + now = datetime(2018, 5, 9) + output = self.engine.render_to_string( + "timesince20", + {"a": now, "b": now + timedelta(days=365) + timedelta(days=364)}, + ) + self.assertEqual(output, "1\xa0year, 11\xa0months") + + +class FunctionTests(SimpleTestCase): + def test_since_now(self): + self.assertEqual(timesince_filter(datetime.now() - timedelta(1)), "1\xa0day") + + def test_no_args(self): + self.assertEqual(timesince_filter(None), "") + + def test_explicit_date(self): + self.assertEqual( + timesince_filter(datetime(2005, 12, 29), datetime(2005, 12, 30)), "1\xa0day" + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_timeuntil.py b/testbed/django__django/tests/template_tests/filter_tests/test_timeuntil.py new file mode 100644 index 0000000000000000000000000000000000000000..d16ec1cfa9a97836702547f6244eac04603e65f1 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_timeuntil.py @@ -0,0 +1,144 @@ +from datetime import datetime, timedelta + +from django.template.defaultfilters import timeuntil_filter +from django.test import SimpleTestCase +from django.test.utils import 
requires_tz_support + +from ..utils import setup +from .timezone_utils import TimezoneTestCase + + +class TimeuntilTests(TimezoneTestCase): + # Default compare with datetime.now() + @setup({"timeuntil01": "{{ a|timeuntil }}"}) + def test_timeuntil01(self): + output = self.engine.render_to_string( + "timeuntil01", {"a": datetime.now() + timedelta(minutes=2, seconds=10)} + ) + self.assertEqual(output, "2\xa0minutes") + + @setup({"timeuntil02": "{{ a|timeuntil }}"}) + def test_timeuntil02(self): + output = self.engine.render_to_string( + "timeuntil02", {"a": (datetime.now() + timedelta(days=1, seconds=10))} + ) + self.assertEqual(output, "1\xa0day") + + @setup({"timeuntil03": "{{ a|timeuntil }}"}) + def test_timeuntil03(self): + output = self.engine.render_to_string( + "timeuntil03", + {"a": (datetime.now() + timedelta(hours=8, minutes=10, seconds=10))}, + ) + self.assertEqual(output, "8\xa0hours, 10\xa0minutes") + + # Compare to a given parameter + @setup({"timeuntil04": "{{ a|timeuntil:b }}"}) + def test_timeuntil04(self): + output = self.engine.render_to_string( + "timeuntil04", + {"a": self.now - timedelta(days=1), "b": self.now - timedelta(days=2)}, + ) + self.assertEqual(output, "1\xa0day") + + @setup({"timeuntil05": "{{ a|timeuntil:b }}"}) + def test_timeuntil05(self): + output = self.engine.render_to_string( + "timeuntil05", + { + "a": self.now - timedelta(days=2), + "b": self.now - timedelta(days=2, minutes=1), + }, + ) + self.assertEqual(output, "1\xa0minute") + + # Regression for #7443 + @setup({"timeuntil06": "{{ earlier|timeuntil }}"}) + def test_timeuntil06(self): + output = self.engine.render_to_string( + "timeuntil06", {"earlier": self.now - timedelta(days=7)} + ) + self.assertEqual(output, "0\xa0minutes") + + @setup({"timeuntil07": "{{ earlier|timeuntil:now }}"}) + def test_timeuntil07(self): + output = self.engine.render_to_string( + "timeuntil07", {"now": self.now, "earlier": self.now - timedelta(days=7)} + ) + self.assertEqual(output, 
"0\xa0minutes") + + @setup({"timeuntil08": "{{ later|timeuntil }}"}) + def test_timeuntil08(self): + output = self.engine.render_to_string( + "timeuntil08", {"later": self.now + timedelta(days=7, hours=1)} + ) + self.assertEqual(output, "1\xa0week") + + @setup({"timeuntil09": "{{ later|timeuntil:now }}"}) + def test_timeuntil09(self): + output = self.engine.render_to_string( + "timeuntil09", {"now": self.now, "later": self.now + timedelta(days=7)} + ) + self.assertEqual(output, "1\xa0week") + + # Differing timezones are calculated correctly. + @requires_tz_support + @setup({"timeuntil10": "{{ a|timeuntil }}"}) + def test_timeuntil10(self): + output = self.engine.render_to_string("timeuntil10", {"a": self.now_tz}) + self.assertEqual(output, "0\xa0minutes") + + @requires_tz_support + @setup({"timeuntil11": "{{ a|timeuntil }}"}) + def test_timeuntil11(self): + output = self.engine.render_to_string("timeuntil11", {"a": self.now_tz_i}) + self.assertEqual(output, "0\xa0minutes") + + @setup({"timeuntil12": "{{ a|timeuntil:b }}"}) + def test_timeuntil12(self): + output = self.engine.render_to_string( + "timeuntil12", {"a": self.now_tz_i, "b": self.now_tz} + ) + self.assertEqual(output, "0\xa0minutes") + + # Regression for #9065 (two date objects). 
+ @setup({"timeuntil13": "{{ a|timeuntil:b }}"}) + def test_timeuntil13(self): + output = self.engine.render_to_string( + "timeuntil13", {"a": self.today, "b": self.today} + ) + self.assertEqual(output, "0\xa0minutes") + + @setup({"timeuntil14": "{{ a|timeuntil:b }}"}) + def test_timeuntil14(self): + output = self.engine.render_to_string( + "timeuntil14", {"a": self.today, "b": self.today - timedelta(hours=24)} + ) + self.assertEqual(output, "1\xa0day") + + @setup({"timeuntil15": "{{ a|timeuntil:b }}"}) + def test_naive_aware_type_error(self): + output = self.engine.render_to_string( + "timeuntil15", {"a": self.now, "b": self.now_tz_i} + ) + self.assertEqual(output, "") + + @setup({"timeuntil16": "{{ a|timeuntil:b }}"}) + def test_aware_naive_type_error(self): + output = self.engine.render_to_string( + "timeuntil16", {"a": self.now_tz_i, "b": self.now} + ) + self.assertEqual(output, "") + + +class FunctionTests(SimpleTestCase): + def test_until_now(self): + self.assertEqual(timeuntil_filter(datetime.now() + timedelta(1, 1)), "1\xa0day") + + def test_no_args(self): + self.assertEqual(timeuntil_filter(None), "") + + def test_explicit_date(self): + self.assertEqual( + timeuntil_filter(datetime(2005, 12, 30), datetime(2005, 12, 29)), "1\xa0day" + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_title.py b/testbed/django__django/tests/template_tests/filter_tests/test_title.py new file mode 100644 index 0000000000000000000000000000000000000000..ad2c7c9b28fb7bf12d31d87259232f81d418f31c --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_title.py @@ -0,0 +1,27 @@ +from django.template.defaultfilters import title +from django.test import SimpleTestCase + +from ..utils import setup + + +class TitleTests(SimpleTestCase): + @setup({"title1": "{{ a|title }}"}) + def test_title1(self): + output = self.engine.render_to_string("title1", {"a": "JOE'S CRAB SHACK"}) + self.assertEqual(output, "Joe's Crab Shack") + + 
@setup({"title2": "{{ a|title }}"}) + def test_title2(self): + output = self.engine.render_to_string("title2", {"a": "555 WEST 53RD STREET"}) + self.assertEqual(output, "555 West 53rd Street") + + +class FunctionTests(SimpleTestCase): + def test_title(self): + self.assertEqual(title("a nice title, isn't it?"), "A Nice Title, Isn't It?") + + def test_unicode(self): + self.assertEqual(title("discoth\xe8que"), "Discoth\xe8que") + + def test_non_string_input(self): + self.assertEqual(title(123), "123") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_truncatechars.py b/testbed/django__django/tests/template_tests/filter_tests/test_truncatechars.py new file mode 100644 index 0000000000000000000000000000000000000000..a444125cf896dd274f1ccdba721d5f4fea421a7d --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_truncatechars.py @@ -0,0 +1,24 @@ +from django.test import SimpleTestCase + +from ..utils import setup + + +class TruncatecharsTests(SimpleTestCase): + @setup({"truncatechars01": "{{ a|truncatechars:3 }}"}) + def test_truncatechars01(self): + output = self.engine.render_to_string( + "truncatechars01", {"a": "Testing, testing"} + ) + self.assertEqual(output, "Te…") + + @setup({"truncatechars02": "{{ a|truncatechars:7 }}"}) + def test_truncatechars02(self): + output = self.engine.render_to_string("truncatechars02", {"a": "Testing"}) + self.assertEqual(output, "Testing") + + @setup({"truncatechars03": "{{ a|truncatechars:'e' }}"}) + def test_fail_silently_incorrect_arg(self): + output = self.engine.render_to_string( + "truncatechars03", {"a": "Testing, testing"} + ) + self.assertEqual(output, "Testing, testing") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_truncatechars_html.py b/testbed/django__django/tests/template_tests/filter_tests/test_truncatechars_html.py new file mode 100644 index 0000000000000000000000000000000000000000..6c5fc3c883377acb4ec1c65a70a6925039f3a682 --- /dev/null 
+++ b/testbed/django__django/tests/template_tests/filter_tests/test_truncatechars_html.py @@ -0,0 +1,48 @@ +from django.template.defaultfilters import truncatechars_html +from django.test import SimpleTestCase + + +class FunctionTests(SimpleTestCase): + def test_truncate_zero(self): + self.assertEqual( + truncatechars_html( + '

    one two - three
    four
    five

    ', 0 + ), + "…", + ) + + def test_truncate(self): + self.assertEqual( + truncatechars_html( + '

    one two - three
    four
    five

    ', 4 + ), + "

    one…

    ", + ) + + def test_truncate2(self): + self.assertEqual( + truncatechars_html( + '

    one two - three
    four
    five

    ', 9 + ), + '

    one two …

    ', + ) + + def test_truncate3(self): + self.assertEqual( + truncatechars_html( + '

    one two - three
    four
    five

    ', 100 + ), + '

    one two - three
    four
    five

    ', + ) + + def test_truncate_unicode(self): + self.assertEqual( + truncatechars_html("\xc5ngstr\xf6m was here", 3), "\xc5n…" + ) + + def test_truncate_something(self): + self.assertEqual(truncatechars_html("abc", 3), "abc") + + def test_invalid_arg(self): + html = '

    one two - three
    four
    five

    ' + self.assertEqual(truncatechars_html(html, "a"), html) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_truncatewords.py b/testbed/django__django/tests/template_tests/filter_tests/test_truncatewords.py new file mode 100644 index 0000000000000000000000000000000000000000..e737a1e3f988ca2a5a3aab27724b68967eb4674e --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_truncatewords.py @@ -0,0 +1,56 @@ +from django.template.defaultfilters import truncatewords +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class TruncatewordsTests(SimpleTestCase): + @setup( + { + "truncatewords01": ( + '{% autoescape off %}{{ a|truncatewords:"2" }} {{ b|truncatewords:"2"}}' + "{% endautoescape %}" + ) + } + ) + def test_truncatewords01(self): + output = self.engine.render_to_string( + "truncatewords01", + {"a": "alpha & bravo", "b": mark_safe("alpha & bravo")}, + ) + self.assertEqual(output, "alpha & … alpha & …") + + @setup({"truncatewords02": '{{ a|truncatewords:"2" }} {{ b|truncatewords:"2"}}'}) + def test_truncatewords02(self): + output = self.engine.render_to_string( + "truncatewords02", + {"a": "alpha & bravo", "b": mark_safe("alpha & bravo")}, + ) + self.assertEqual(output, "alpha & … alpha & …") + + +class FunctionTests(SimpleTestCase): + def test_truncate(self): + self.assertEqual(truncatewords("A sentence with a few words in it", 1), "A …") + + def test_truncate2(self): + self.assertEqual( + truncatewords("A sentence with a few words in it", 5), + "A sentence with a few …", + ) + + def test_overtruncate(self): + self.assertEqual( + truncatewords("A sentence with a few words in it", 100), + "A sentence with a few words in it", + ) + + def test_invalid_number(self): + self.assertEqual( + truncatewords("A sentence with a few words in it", "not a number"), + "A sentence with a few words in it", + ) + + def test_non_string_input(self): + 
self.assertEqual(truncatewords(123, 2), "123") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_truncatewords_html.py b/testbed/django__django/tests/template_tests/filter_tests/test_truncatewords_html.py new file mode 100644 index 0000000000000000000000000000000000000000..32b7c81a76262e00223f2b2e1ccce36259f9e9ee --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_truncatewords_html.py @@ -0,0 +1,60 @@ +from django.template.defaultfilters import truncatewords_html +from django.test import SimpleTestCase + + +class FunctionTests(SimpleTestCase): + def test_truncate_zero(self): + self.assertEqual( + truncatewords_html( + '

    one two - three
    four
    five

    ', 0 + ), + "", + ) + + def test_truncate(self): + self.assertEqual( + truncatewords_html( + '

    one two - three
    four
    five

    ', 2 + ), + '

    one two …

    ', + ) + + def test_truncate2(self): + self.assertEqual( + truncatewords_html( + '

    one two - three
    four
    five

    ', 4 + ), + '

    one two - three …

    ', + ) + + def test_truncate3(self): + self.assertEqual( + truncatewords_html( + '

    one two - three
    four
    five

    ', 5 + ), + '

    one two - three
    four …

    ', + ) + + def test_truncate4(self): + self.assertEqual( + truncatewords_html( + '

    one two - three
    four
    five

    ', 100 + ), + '

    one two - three
    four
    five

    ', + ) + + def test_truncate_unicode(self): + self.assertEqual( + truncatewords_html("\xc5ngstr\xf6m was here", 1), "\xc5ngstr\xf6m …" + ) + + def test_truncate_complex(self): + self.assertEqual( + truncatewords_html( + "Buenos días! ¿Cómo está?", 3 + ), + "Buenos días! ¿Cómo …", + ) + + def test_invalid_arg(self): + self.assertEqual(truncatewords_html("

    string

    ", "a"), "

    string

    ") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_unordered_list.py b/testbed/django__django/tests/template_tests/filter_tests/test_unordered_list.py new file mode 100644 index 0000000000000000000000000000000000000000..1748a0fb5474ad76c6d9a747c85e843aebf1d7f6 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_unordered_list.py @@ -0,0 +1,185 @@ +from django.template.defaultfilters import unordered_list +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe +from django.utils.translation import gettext_lazy + +from ..utils import setup + + +class UnorderedListTests(SimpleTestCase): + @setup({"unordered_list01": "{{ a|unordered_list }}"}) + def test_unordered_list01(self): + output = self.engine.render_to_string("unordered_list01", {"a": ["x>", ["x>\n\t
      \n\t\t
    • <y
    • \n\t
    \n\t" + ) + + @setup( + { + "unordered_list02": ( + "{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}" + ) + } + ) + def test_unordered_list02(self): + output = self.engine.render_to_string("unordered_list02", {"a": ["x>", ["x>\n\t
      \n\t\t
    • \n\t
    \n\t") + + @setup({"unordered_list03": "{{ a|unordered_list }}"}) + def test_unordered_list03(self): + output = self.engine.render_to_string( + "unordered_list03", {"a": ["x>", [mark_safe("x>\n\t
      \n\t\t
    • \n\t
    \n\t" + ) + + @setup( + { + "unordered_list04": ( + "{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}" + ) + } + ) + def test_unordered_list04(self): + output = self.engine.render_to_string( + "unordered_list04", {"a": ["x>", [mark_safe("x>\n\t
      \n\t\t
    • \n\t
    \n\t") + + @setup( + { + "unordered_list05": ( + "{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}" + ) + } + ) + def test_unordered_list05(self): + output = self.engine.render_to_string("unordered_list05", {"a": ["x>", ["x>\n\t
      \n\t\t
    • \n\t
    \n\t") + + +class FunctionTests(SimpleTestCase): + def test_list(self): + self.assertEqual( + unordered_list(["item 1", "item 2"]), "\t
  • item 1
  • \n\t
  • item 2
  • " + ) + + def test_list_gettext(self): + self.assertEqual( + unordered_list(["item 1", gettext_lazy("item 2")]), + "\t
  • item 1
  • \n\t
  • item 2
  • ", + ) + + def test_nested(self): + self.assertEqual( + unordered_list(["item 1", ["item 1.1"]]), + "\t
  • item 1\n\t
      \n\t\t
    • item 1.1
    • \n\t
    \n\t
  • ", + ) + + def test_nested2(self): + self.assertEqual( + unordered_list(["item 1", ["item 1.1", "item1.2"], "item 2"]), + "\t
  • item 1\n\t
      \n\t\t
    • item 1.1
    • \n\t\t
    • item1.2" + "
    • \n\t
    \n\t
  • \n\t
  • item 2
  • ", + ) + + def test_nested3(self): + self.assertEqual( + unordered_list(["item 1", "item 2", ["item 2.1"]]), + "\t
  • item 1
  • \n\t
  • item 2\n\t
      \n\t\t
    • item 2.1" + "
    • \n\t
    \n\t
  • ", + ) + + def test_nested_multiple(self): + self.assertEqual( + unordered_list(["item 1", ["item 1.1", ["item 1.1.1", ["item 1.1.1.1"]]]]), + "\t
  • item 1\n\t
      \n\t\t
    • item 1.1\n\t\t
        \n\t\t\t
      • " + "item 1.1.1\n\t\t\t
          \n\t\t\t\t
        • item 1.1.1.1
        • \n\t\t\t" + "
        \n\t\t\t
      • \n\t\t
      \n\t\t
    • \n\t
    \n\t
  • ", + ) + + def test_nested_multiple2(self): + self.assertEqual( + unordered_list(["States", ["Kansas", ["Lawrence", "Topeka"], "Illinois"]]), + "\t
  • States\n\t
      \n\t\t
    • Kansas\n\t\t
        \n\t\t\t
      • " + "Lawrence
      • \n\t\t\t
      • Topeka
      • \n\t\t
      \n\t\t
    • " + "\n\t\t
    • Illinois
    • \n\t
    \n\t
  • ", + ) + + def test_autoescape(self): + self.assertEqual( + unordered_list(["item 1", "item 2"]), + "\t
  • <a>item 1</a>
  • \n\t
  • item 2
  • ", + ) + + def test_autoescape_off(self): + self.assertEqual( + unordered_list(["item 1", "item 2"], autoescape=False), + "\t
  • item 1
  • \n\t
  • item 2
  • ", + ) + + def test_ulitem(self): + class ULItem: + def __init__(self, title): + self.title = title + + def __str__(self): + return "ulitem-%s" % str(self.title) + + a = ULItem("a") + b = ULItem("b") + c = ULItem("c") + self.assertEqual( + unordered_list([a, b, c]), + "\t
  • ulitem-a
  • \n\t
  • ulitem-b
  • \n\t" + "
  • ulitem-<a>c</a>
  • ", + ) + + def item_generator(): + yield from (a, b, c) + + self.assertEqual( + unordered_list(item_generator()), + "\t
  • ulitem-a
  • \n\t
  • ulitem-b
  • \n\t" + "
  • ulitem-<a>c</a>
  • ", + ) + + def test_nested_generators(self): + def inner_generator(): + yield from ("B", "C") + + def item_generator(): + yield "A" + yield inner_generator() + yield "D" + + self.assertEqual( + unordered_list(item_generator()), + "\t
  • A\n\t
      \n\t\t
    • B
    • \n\t\t
    • C
    • \n\t
    \n\t
  • \n\t" + "
  • D
  • ", + ) + + def test_ulitem_autoescape_off(self): + class ULItem: + def __init__(self, title): + self.title = title + + def __str__(self): + return "ulitem-%s" % str(self.title) + + a = ULItem("a") + b = ULItem("b") + c = ULItem("c") + self.assertEqual( + unordered_list([a, b, c], autoescape=False), + "\t
  • ulitem-a
  • \n\t
  • ulitem-b
  • \n\t
  • ulitem-c
  • ", + ) + + def item_generator(): + yield from (a, b, c) + + self.assertEqual( + unordered_list(item_generator(), autoescape=False), + "\t
  • ulitem-a
  • \n\t
  • ulitem-b
  • \n\t
  • ulitem-c
  • ", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_upper.py b/testbed/django__django/tests/template_tests/filter_tests/test_upper.py new file mode 100644 index 0000000000000000000000000000000000000000..90f14a794a262fd7ccecaeb5468525c9dc5089e5 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_upper.py @@ -0,0 +1,44 @@ +from django.template.defaultfilters import upper +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class UpperTests(SimpleTestCase): + """ + The "upper" filter messes up entities (which are case-sensitive), + so it's not safe for non-escaping purposes. + """ + + @setup( + { + "upper01": ( + "{% autoescape off %}{{ a|upper }} {{ b|upper }}{% endautoescape %}" + ) + } + ) + def test_upper01(self): + output = self.engine.render_to_string( + "upper01", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "A & B A & B") + + @setup({"upper02": "{{ a|upper }} {{ b|upper }}"}) + def test_upper02(self): + output = self.engine.render_to_string( + "upper02", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "A & B A &AMP; B") + + +class FunctionTests(SimpleTestCase): + def test_upper(self): + self.assertEqual(upper("Mixed case input"), "MIXED CASE INPUT") + + def test_unicode(self): + # lowercase e umlaut + self.assertEqual(upper("\xeb"), "\xcb") + + def test_non_string_input(self): + self.assertEqual(upper(123), "123") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_urlencode.py b/testbed/django__django/tests/template_tests/filter_tests/test_urlencode.py new file mode 100644 index 0000000000000000000000000000000000000000..d3c8d2931a80192b530a9bd302a50b2c7cb12914 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_urlencode.py @@ -0,0 +1,24 @@ +from django.template.defaultfilters import urlencode +from django.test import 
SimpleTestCase + +from ..utils import setup + + +class UrlencodeTests(SimpleTestCase): + @setup({"urlencode01": "{{ url|urlencode }}"}) + def test_urlencode01(self): + output = self.engine.render_to_string("urlencode01", {"url": '/test&"/me?/'}) + self.assertEqual(output, "/test%26%22/me%3F/") + + @setup({"urlencode02": '/test/{{ urlbit|urlencode:"" }}/'}) + def test_urlencode02(self): + output = self.engine.render_to_string("urlencode02", {"urlbit": "escape/slash"}) + self.assertEqual(output, "/test/escape%2Fslash/") + + +class FunctionTests(SimpleTestCase): + def test_urlencode(self): + self.assertEqual(urlencode("fran\xe7ois & jill"), "fran%C3%A7ois%20%26%20jill") + + def test_non_string_input(self): + self.assertEqual(urlencode(1), "1") diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_urlize.py b/testbed/django__django/tests/template_tests/filter_tests/test_urlize.py new file mode 100644 index 0000000000000000000000000000000000000000..abc227ba6a78b69830efa9655c47e51b7de6b9b3 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_urlize.py @@ -0,0 +1,447 @@ +from django.template.defaultfilters import urlize +from django.test import SimpleTestCase +from django.utils.functional import lazy +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class UrlizeTests(SimpleTestCase): + @setup( + { + "urlize01": ( + "{% autoescape off %}{{ a|urlize }} {{ b|urlize }}{% endautoescape %}" + ) + } + ) + def test_urlize01(self): + output = self.engine.render_to_string( + "urlize01", + { + "a": "http://example.com/?x=&y=", + "b": mark_safe("http://example.com?x=&y=<2>"), + }, + ) + self.assertEqual( + output, + '' + "http://example.com/?x=&y= " + '' + "http://example.com?x=&y=<2>", + ) + + @setup({"urlize02": "{{ a|urlize }} {{ b|urlize }}"}) + def test_urlize02(self): + output = self.engine.render_to_string( + "urlize02", + { + "a": "http://example.com/?x=&y=", + "b": 
mark_safe("http://example.com?x=&y="), + }, + ) + self.assertEqual( + output, + '' + "http://example.com/?x=&y= " + '' + "http://example.com?x=&y=", + ) + + @setup({"urlize03": "{% autoescape off %}{{ a|urlize }}{% endautoescape %}"}) + def test_urlize03(self): + output = self.engine.render_to_string("urlize03", {"a": mark_safe("a & b")}) + self.assertEqual(output, "a & b") + + @setup({"urlize04": "{{ a|urlize }}"}) + def test_urlize04(self): + output = self.engine.render_to_string("urlize04", {"a": mark_safe("a & b")}) + self.assertEqual(output, "a & b") + + # This will lead to a nonsense result, but at least it won't be + # exploitable for XSS purposes when auto-escaping is on. + @setup({"urlize05": "{% autoescape off %}{{ a|urlize }}{% endautoescape %}"}) + def test_urlize05(self): + output = self.engine.render_to_string( + "urlize05", {"a": ""} + ) + self.assertEqual(output, "") + + @setup({"urlize06": "{{ a|urlize }}"}) + def test_urlize06(self): + output = self.engine.render_to_string( + "urlize06", {"a": ""} + ) + self.assertEqual(output, "<script>alert('foo')</script>") + + # mailto: testing for urlize + @setup({"urlize07": "{{ a|urlize }}"}) + def test_urlize07(self): + output = self.engine.render_to_string( + "urlize07", {"a": "Email me at me@example.com"} + ) + self.assertEqual( + output, + 'Email me at me@example.com', + ) + + @setup({"urlize08": "{{ a|urlize }}"}) + def test_urlize08(self): + output = self.engine.render_to_string( + "urlize08", {"a": "Email me at "} + ) + self.assertEqual( + output, + 'Email me at <me@example.com>', + ) + + @setup({"urlize09": "{% autoescape off %}{{ a|urlize }}{% endautoescape %}"}) + def test_urlize09(self): + output = self.engine.render_to_string( + "urlize09", {"a": "http://example.com/?x=&y=<2>"} + ) + self.assertEqual( + output, + '' + "http://example.com/?x=&y=<2>", + ) + + +class FunctionTests(SimpleTestCase): + def test_urls(self): + self.assertEqual( + urlize("http://google.com"), + 'http://google.com', + ) + 
self.assertEqual( + urlize("http://google.com/"), + 'http://google.com/', + ) + self.assertEqual( + urlize("www.google.com"), + 'www.google.com', + ) + self.assertEqual( + urlize("djangoproject.org"), + 'djangoproject.org', + ) + self.assertEqual( + urlize("djangoproject.org/"), + 'djangoproject.org/', + ) + + def test_url_split_chars(self): + # Quotes (single and double) and angle brackets shouldn't be considered + # part of URLs. + self.assertEqual( + urlize('www.server.com"abc'), + 'www.server.com"' + "abc", + ) + self.assertEqual( + urlize("www.server.com'abc"), + 'www.server.com'' + "abc", + ) + self.assertEqual( + urlize("www.server.comwww.server.com<abc', + ) + self.assertEqual( + urlize("www.server.com>abc"), + 'www.server.com>abc', + ) + + def test_email(self): + self.assertEqual( + urlize("info@djangoproject.org"), + 'info@djangoproject.org', + ) + + def test_word_with_dot(self): + self.assertEqual(urlize("some.organization"), "some.organization"), + + def test_https(self): + self.assertEqual( + urlize("https://google.com"), + 'https://google.com', + ) + + def test_quoting(self): + """ + #9655 - Check urlize doesn't overquote already quoted urls. 
The + teststring is the urlquoted version of 'http://hi.baidu.com/重新开始' + """ + self.assertEqual( + urlize("http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B"), + 'http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B' + "", + ) + + def test_urlencoded(self): + self.assertEqual( + urlize("www.mystore.com/30%OffCoupons!"), + '' + "www.mystore.com/30%OffCoupons!", + ) + self.assertEqual( + urlize("https://en.wikipedia.org/wiki/Caf%C3%A9"), + '' + "https://en.wikipedia.org/wiki/Caf%C3%A9", + ) + + def test_unicode(self): + self.assertEqual( + urlize("https://en.wikipedia.org/wiki/Café"), + '' + "https://en.wikipedia.org/wiki/Café", + ) + + def test_parenthesis(self): + """ + #11911 - Check urlize keeps balanced parentheses + """ + self.assertEqual( + urlize("https://en.wikipedia.org/wiki/Django_(web_framework)"), + 'https://en.wikipedia.org/wiki/Django_(web_framework)', + ) + self.assertEqual( + urlize("(see https://en.wikipedia.org/wiki/Django_(web_framework))"), + '(see https://en.wikipedia.org/wiki/Django_(web_framework))', + ) + + def test_nofollow(self): + """ + #12183 - Check urlize adds nofollow properly - see #12183 + """ + self.assertEqual( + urlize("foo@bar.com or www.bar.com"), + 'foo@bar.com or ' + 'www.bar.com', + ) + + def test_idn(self): + """ + #13704 - Check urlize handles IDN correctly + """ + self.assertEqual( + urlize("http://c✶.ws"), + 'http://c✶.ws', + ) + self.assertEqual( + urlize("www.c✶.ws"), + 'www.c✶.ws', + ) + self.assertEqual( + urlize("c✶.org"), 'c✶.org' + ) + self.assertEqual( + urlize("info@c✶.org"), 'info@c✶.org' + ) + + def test_malformed(self): + """ + #16395 - Check urlize doesn't highlight malformed URIs + """ + self.assertEqual(urlize("http:///www.google.com"), "http:///www.google.com") + self.assertEqual(urlize("http://.google.com"), "http://.google.com") + self.assertEqual(urlize("http://@foo.com"), "http://@foo.com") + + def test_tlds(self): + """ + #16656 - Check urlize accepts more TLDs + """ + self.assertEqual( + 
urlize("usa.gov"), 'usa.gov' + ) + + def test_invalid_email(self): + """ + #17592 - Check urlize don't crash on invalid email with dot-starting + domain + """ + self.assertEqual(urlize("email@.stream.ru"), "email@.stream.ru") + + def test_uppercase(self): + """ + #18071 - Check urlize accepts uppercased URL schemes + """ + self.assertEqual( + urlize("HTTPS://github.com/"), + 'HTTPS://github.com/', + ) + + def test_trailing_period(self): + """ + #18644 - Check urlize trims trailing period when followed by parenthesis + """ + self.assertEqual( + urlize("(Go to http://www.example.com/foo.)"), + '(Go to ' + "http://www.example.com/foo.)", + ) + + def test_trailing_multiple_punctuation(self): + self.assertEqual( + urlize("A test http://testing.com/example.."), + 'A test ' + "http://testing.com/example..", + ) + self.assertEqual( + urlize("A test http://testing.com/example!!"), + 'A test ' + "http://testing.com/example!!", + ) + self.assertEqual( + urlize("A test http://testing.com/example!!!"), + 'A test ' + "http://testing.com/example!!!", + ) + self.assertEqual( + urlize('A test http://testing.com/example.,:;)"!'), + 'A test ' + "http://testing.com/example.,:;)"!", + ) + + def test_brackets(self): + """ + #19070 - Check urlize handles brackets properly + """ + self.assertEqual( + urlize("[see www.example.com]"), + '[see www.example.com]', + ) + self.assertEqual( + urlize("see test[at[example.com"), + 'see ' + "test[at[example.com", + ) + self.assertEqual( + urlize("[http://168.192.0.1](http://168.192.0.1)"), + '[' + "http://168.192.0.1](http://168.192.0.1)", + ) + + def test_wrapping_characters(self): + wrapping_chars = ( + ("()", ("(", ")")), + ("<>", ("<", ">")), + ("[]", ("[", "]")), + ('""', (""", """)), + ("''", ("'", "'")), + ) + for wrapping_in, (start_out, end_out) in wrapping_chars: + with self.subTest(wrapping_in=wrapping_in): + start_in, end_in = wrapping_in + self.assertEqual( + urlize(start_in + "https://www.example.org/" + end_in), + f'{start_out}' + 
f"https://www.example.org/{end_out}", + ) + + def test_ipv4(self): + self.assertEqual( + urlize("http://192.168.0.15/api/9"), + '' + "http://192.168.0.15/api/9", + ) + + def test_ipv6(self): + self.assertEqual( + urlize("http://[2001:db8:cafe::2]/api/9"), + '' + "http://[2001:db8:cafe::2]/api/9", + ) + + def test_quotation_marks(self): + """ + #20364 - Check urlize correctly include quotation marks in links + """ + self.assertEqual( + urlize('before "hi@example.com" afterward', autoescape=False), + 'before "hi@example.com" afterward', + ) + self.assertEqual( + urlize('before hi@example.com" afterward', autoescape=False), + 'before hi@example.com" afterward', + ) + self.assertEqual( + urlize('before "hi@example.com afterward', autoescape=False), + 'before "hi@example.com afterward', + ) + self.assertEqual( + urlize("before 'hi@example.com' afterward", autoescape=False), + "before 'hi@example.com' afterward", + ) + self.assertEqual( + urlize("before hi@example.com' afterward", autoescape=False), + 'before hi@example.com\' afterward', + ) + self.assertEqual( + urlize("before 'hi@example.com afterward", autoescape=False), + 'before \'hi@example.com afterward', + ) + + def test_quote_commas(self): + """ + #20364 - Check urlize copes with commas following URLs in quotes + """ + self.assertEqual( + urlize( + 'Email us at "hi@example.com", or phone us at +xx.yy', autoescape=False + ), + 'Email us at "hi@example.com", or ' + "phone us at +xx.yy", + ) + + def test_exclamation_marks(self): + """ + #23715 - Check urlize correctly handles exclamation marks after TLDs + or query string + """ + self.assertEqual( + urlize("Go to djangoproject.com! and enjoy."), + 'Go to djangoproject.com' + "! and enjoy.", + ) + self.assertEqual( + urlize("Search for google.com/?q=! and see."), + 'Search for google.com/?q=' + "! and see.", + ) + self.assertEqual( + urlize("Search for google.com/?q=dj!`? and see."), + 'Search for ' + "google.com/?q=dj!`? 
and see.", + ) + self.assertEqual( + urlize("Search for google.com/?q=dj!`?! and see."), + 'Search for ' + "google.com/?q=dj!`?! and see.", + ) + + def test_non_string_input(self): + self.assertEqual(urlize(123), "123") + + def test_autoescape(self): + self.assertEqual( + urlize('foobarbuz'), + 'foo<a href=" google.com' + " ">bar</a>buz", + ) + + def test_autoescape_off(self): + self.assertEqual( + urlize('foobarbuz', autoescape=False), + 'foogoogle.com ">' + "barbuz", + ) + + def test_lazystring(self): + prepend_www = lazy(lambda url: "www." + url, str) + self.assertEqual( + urlize(prepend_www("google.com")), + 'www.google.com', + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_urlizetrunc.py b/testbed/django__django/tests/template_tests/filter_tests/test_urlizetrunc.py new file mode 100644 index 0000000000000000000000000000000000000000..752ee3571ecab8cae65fb7fd5195ff854f65209c --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_urlizetrunc.py @@ -0,0 +1,105 @@ +from django.template.defaultfilters import urlizetrunc +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class UrlizetruncTests(SimpleTestCase): + @setup( + { + "urlizetrunc01": ( + '{% autoescape off %}{{ a|urlizetrunc:"8" }} {{ b|urlizetrunc:"8" }}' + "{% endautoescape %}" + ) + } + ) + def test_urlizetrunc01(self): + output = self.engine.render_to_string( + "urlizetrunc01", + { + "a": '"Unsafe" http://example.com/x=&y=', + "b": mark_safe(""Safe" http://example.com?x=&y="), + }, + ) + self.assertEqual( + output, + '"Unsafe" ' + 'http://… ' + ""Safe" " + 'http://…', + ) + + @setup({"urlizetrunc02": '{{ a|urlizetrunc:"8" }} {{ b|urlizetrunc:"8" }}'}) + def test_urlizetrunc02(self): + output = self.engine.render_to_string( + "urlizetrunc02", + { + "a": '"Unsafe" http://example.com/x=&y=', + "b": mark_safe(""Safe" http://example.com?x=&y="), + }, + ) + self.assertEqual( + output, 
+ '"Unsafe" ' + "http://… " + '"Safe" ' + "http://…", + ) + + +class FunctionTests(SimpleTestCase): + def test_truncate(self): + uri = "http://31characteruri.com/test/" + self.assertEqual(len(uri), 31) + + self.assertEqual( + urlizetrunc(uri, 31), + '' + "http://31characteruri.com/test/", + ) + + self.assertEqual( + urlizetrunc(uri, 30), + '' + "http://31characteruri.com/tes…", + ) + + self.assertEqual( + urlizetrunc(uri, 1), + '', + ) + + def test_overtruncate(self): + self.assertEqual( + urlizetrunc("http://short.com/", 20), + 'http://short.com/', + ) + + def test_query_string(self): + self.assertEqual( + urlizetrunc( + "http://www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search" + "&meta=", + 20, + ), + 'http://www.google.c…', + ) + + def test_non_string_input(self): + self.assertEqual(urlizetrunc(123, 1), "123") + + def test_autoescape(self): + self.assertEqual( + urlizetrunc('foobarbuz', 10), + 'foo<a href=" google.com' + " ">bar</a>buz", + ) + + def test_autoescape_off(self): + self.assertEqual( + urlizetrunc('foobarbuz', 9, autoescape=False), + 'foogoogle.c… ">' + "barbuz", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_wordcount.py b/testbed/django__django/tests/template_tests/filter_tests/test_wordcount.py new file mode 100644 index 0000000000000000000000000000000000000000..d3a1eb05f104c32c4b3176000ea306629f8d479c --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_wordcount.py @@ -0,0 +1,42 @@ +from django.template.defaultfilters import wordcount +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class WordcountTests(SimpleTestCase): + @setup( + { + "wordcount01": ( + "{% autoescape off %}{{ a|wordcount }} {{ b|wordcount }}" + "{% endautoescape %}" + ) + } + ) + def test_wordcount01(self): + output = self.engine.render_to_string( + "wordcount01", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "3 
3") + + @setup({"wordcount02": "{{ a|wordcount }} {{ b|wordcount }}"}) + def test_wordcount02(self): + output = self.engine.render_to_string( + "wordcount02", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "3 3") + + +class FunctionTests(SimpleTestCase): + def test_empty_string(self): + self.assertEqual(wordcount(""), 0) + + def test_count_one(self): + self.assertEqual(wordcount("oneword"), 1) + + def test_count_multiple(self): + self.assertEqual(wordcount("lots of words"), 3) + + def test_non_string_input(self): + self.assertEqual(wordcount(123), 1) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_wordwrap.py b/testbed/django__django/tests/template_tests/filter_tests/test_wordwrap.py new file mode 100644 index 0000000000000000000000000000000000000000..88fbd274da945deb9915b99c51916ec5718116db --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_wordwrap.py @@ -0,0 +1,80 @@ +from django.template.defaultfilters import wordwrap +from django.test import SimpleTestCase +from django.utils.functional import lazystr +from django.utils.safestring import mark_safe + +from ..utils import setup + + +class WordwrapTests(SimpleTestCase): + @setup( + { + "wordwrap01": ( + '{% autoescape off %}{{ a|wordwrap:"3" }} {{ b|wordwrap:"3" }}' + "{% endautoescape %}" + ) + } + ) + def test_wordwrap01(self): + output = self.engine.render_to_string( + "wordwrap01", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "a &\nb a &\nb") + + @setup({"wordwrap02": '{{ a|wordwrap:"3" }} {{ b|wordwrap:"3" }}'}) + def test_wordwrap02(self): + output = self.engine.render_to_string( + "wordwrap02", {"a": "a & b", "b": mark_safe("a & b")} + ) + self.assertEqual(output, "a &\nb a &\nb") + + +class FunctionTests(SimpleTestCase): + def test_wrap(self): + self.assertEqual( + wordwrap( + "this is a long paragraph of text that really needs to be wrapped I'm " + "afraid", + 14, + ), + "this is a long\nparagraph 
of\ntext that\nreally needs\nto be wrapped\n" + "I'm afraid", + ) + + def test_indent(self): + self.assertEqual( + wordwrap( + "this is a short paragraph of text.\n But this line should be " + "indented", + 14, + ), + "this is a\nshort\nparagraph of\ntext.\n But this\nline should be\n" + "indented", + ) + + def test_indent2(self): + self.assertEqual( + wordwrap( + "this is a short paragraph of text.\n But this line should be " + "indented", + 15, + ), + "this is a short\nparagraph of\ntext.\n But this line\nshould be\n" + "indented", + ) + + def test_non_string_input(self): + self.assertEqual(wordwrap(123, 2), "123") + + def test_wrap_lazy_string(self): + self.assertEqual( + wordwrap( + lazystr( + "this is a long paragraph of text that really needs to be wrapped " + "I'm afraid" + ), + 14, + ), + "this is a long\nparagraph of\ntext that\nreally needs\nto be wrapped\n" + "I'm afraid", + ) diff --git a/testbed/django__django/tests/template_tests/filter_tests/test_yesno.py b/testbed/django__django/tests/template_tests/filter_tests/test_yesno.py new file mode 100644 index 0000000000000000000000000000000000000000..d26b2a76842c1a0dcfcca8ac051d8897590777ec --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/test_yesno.py @@ -0,0 +1,41 @@ +from django.template.defaultfilters import yesno +from django.test import SimpleTestCase + +from ..utils import setup + + +class YesNoTests(SimpleTestCase): + @setup({"t": '{{ var|yesno:"yup,nup,mup" }} {{ var|yesno }}'}) + def test_true(self): + output = self.engine.render_to_string("t", {"var": True}) + self.assertEqual(output, "yup yes") + + +class FunctionTests(SimpleTestCase): + def test_true(self): + self.assertEqual(yesno(True), "yes") + + def test_false(self): + self.assertEqual(yesno(False), "no") + + def test_none(self): + self.assertEqual(yesno(None), "maybe") + + def test_true_arguments(self): + self.assertEqual(yesno(True, "certainly,get out of town,perhaps"), "certainly") + + def 
test_false_arguments(self): + self.assertEqual( + yesno(False, "certainly,get out of town,perhaps"), "get out of town" + ) + + def test_none_two_arguments(self): + self.assertEqual(yesno(None, "certainly,get out of town"), "get out of town") + + def test_none_three_arguments(self): + self.assertEqual(yesno(None, "certainly,get out of town,perhaps"), "perhaps") + + def test_invalid_value(self): + self.assertIs(yesno(True, "yes"), True) + self.assertIs(yesno(False, "yes"), False) + self.assertIsNone(yesno(None, "yes")) diff --git a/testbed/django__django/tests/template_tests/filter_tests/timezone_utils.py b/testbed/django__django/tests/template_tests/filter_tests/timezone_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a990d164ae85bac7915d5ac47d47826ba0472bb8 --- /dev/null +++ b/testbed/django__django/tests/template_tests/filter_tests/timezone_utils.py @@ -0,0 +1,18 @@ +from datetime import date, datetime + +from django.test import SimpleTestCase +from django.utils import timezone + + +class TimezoneTestCase(SimpleTestCase): + def setUp(self): + self.now = datetime.now() + self.now_tz = timezone.make_aware( + self.now, + timezone.get_default_timezone(), + ) + self.now_tz_i = timezone.localtime( + self.now_tz, + timezone.get_fixed_timezone(195), + ) + self.today = date.today() diff --git a/testbed/django__django/tests/template_tests/jinja2/template_tests/using.html b/testbed/django__django/tests/template_tests/jinja2/template_tests/using.html new file mode 100644 index 0000000000000000000000000000000000000000..8ce973e958c3ed2f286da03958b4ae2abf4051f0 --- /dev/null +++ b/testbed/django__django/tests/template_tests/jinja2/template_tests/using.html @@ -0,0 +1 @@ +Jinja2 diff --git a/testbed/django__django/tests/template_tests/other_templates/priority/foo.html b/testbed/django__django/tests/template_tests/other_templates/priority/foo.html new file mode 100644 index 
0000000000000000000000000000000000000000..4d8014f9ad7fd4c37ec31fecb2874bd37815475e --- /dev/null +++ b/testbed/django__django/tests/template_tests/other_templates/priority/foo.html @@ -0,0 +1 @@ +priority diff --git a/testbed/django__django/tests/template_tests/other_templates/test_dirs.html b/testbed/django__django/tests/template_tests/other_templates/test_dirs.html new file mode 100644 index 0000000000000000000000000000000000000000..d99b954618a683f199b9b97f0584e0eb0b1aca46 --- /dev/null +++ b/testbed/django__django/tests/template_tests/other_templates/test_dirs.html @@ -0,0 +1 @@ +spam eggs{{ obj }} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs/extend-missing.html b/testbed/django__django/tests/template_tests/recursive_templates/fs/extend-missing.html new file mode 100644 index 0000000000000000000000000000000000000000..e3c106e2c1e6de327bf5f1ee8819e998a3118be0 --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs/extend-missing.html @@ -0,0 +1 @@ +{% extends "missing.html" %} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs/one.html b/testbed/django__django/tests/template_tests/recursive_templates/fs/one.html new file mode 100644 index 0000000000000000000000000000000000000000..f72e72e1a6de8d940890fb7010f8388804bbba6b --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs/one.html @@ -0,0 +1,3 @@ +{% extends "two.html" %} + +{% block content %}{{ block.super }} one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs/other-recursive.html b/testbed/django__django/tests/template_tests/recursive_templates/fs/other-recursive.html new file mode 100644 index 0000000000000000000000000000000000000000..84e5ac9eae907c51fddab0541334460968815a5f --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs/other-recursive.html @@ -0,0 +1 @@ +{% extends "recursive.html" %} diff --git 
a/testbed/django__django/tests/template_tests/recursive_templates/fs/recursive.html b/testbed/django__django/tests/template_tests/recursive_templates/fs/recursive.html new file mode 100644 index 0000000000000000000000000000000000000000..cbf242d8d564fdbdcdba075c71efb67d524dc023 --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs/recursive.html @@ -0,0 +1,3 @@ +{% extends "recursive.html" %} + +{% block content %}{{ block.super }} fs/recursive{% endblock %} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs/self.html b/testbed/django__django/tests/template_tests/recursive_templates/fs/self.html new file mode 100644 index 0000000000000000000000000000000000000000..f3e5bbf3016717ce528196697b55a71ee5312ef4 --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs/self.html @@ -0,0 +1 @@ +{% extends "self.html" %} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs/three.html b/testbed/django__django/tests/template_tests/recursive_templates/fs/three.html new file mode 100644 index 0000000000000000000000000000000000000000..360aeeea5e88c1c9a8316eeb0e597d023c640bc7 --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs/three.html @@ -0,0 +1 @@ +{% block content %}three{% endblock %} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs/two.html b/testbed/django__django/tests/template_tests/recursive_templates/fs/two.html new file mode 100644 index 0000000000000000000000000000000000000000..b9b80ec7a0b1bba897010d59345cb015b210aaaf --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs/two.html @@ -0,0 +1,3 @@ +{% extends "three.html" %} + +{% block content %}{{ block.super }} two{% endblock %} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs2/recursive.html 
b/testbed/django__django/tests/template_tests/recursive_templates/fs2/recursive.html new file mode 100644 index 0000000000000000000000000000000000000000..52a338ca9f8ddb1f2e1229c6f53d46a12c00d641 --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs2/recursive.html @@ -0,0 +1,3 @@ +{% extends "recursive.html" %} + +{% block content %}{{ block.super }} fs2/recursive{% endblock %} diff --git a/testbed/django__django/tests/template_tests/recursive_templates/fs3/recursive.html b/testbed/django__django/tests/template_tests/recursive_templates/fs3/recursive.html new file mode 100644 index 0000000000000000000000000000000000000000..aefbad4582ba5e55a69d08d6df9363c907acb5ad --- /dev/null +++ b/testbed/django__django/tests/template_tests/recursive_templates/fs3/recursive.html @@ -0,0 +1 @@ +{% block content %}fs3/recursive{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc1.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc1.html new file mode 100644 index 0000000000000000000000000000000000000000..a854bef662c7b440097decbf4098ef5b05678d18 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc1.html @@ -0,0 +1 @@ +{% include "./../../three.html" %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc2.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc2.html new file mode 100644 index 0000000000000000000000000000000000000000..376f47975e44ac318ddd77a77164737ef2e2f886 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc2.html @@ -0,0 +1 @@ +{% include "./include_content.html" %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc3.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc3.html new file mode 100644 index 
0000000000000000000000000000000000000000..7a8374df518de41da30b0a318552393aaf486a77 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/inc3.html @@ -0,0 +1 @@ +{% include tmpl %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/include_content.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/include_content.html new file mode 100644 index 0000000000000000000000000000000000000000..132d8b814533db904ea33f50cf372a68cee2f31d --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/include_content.html @@ -0,0 +1 @@ +dir2 include diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/one.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/one.html new file mode 100644 index 0000000000000000000000000000000000000000..11e6424213fc57937cbcaf3ae31ed846d28df794 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/dir2/one.html @@ -0,0 +1,3 @@ +{% extends "./../../one.html" %} + +{% block content %}{{ block.super }} dir2 one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/looped.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/looped.html new file mode 100644 index 0000000000000000000000000000000000000000..8e9d8ac4e5c1f6b068276dbc9a44b0276fe13858 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/looped.html @@ -0,0 +1,3 @@ +{% extends "./dir2/../looped.html" %} + +{% block content %}{{ block.super }} dir1 three{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/one.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/one.html new file mode 100644 index 0000000000000000000000000000000000000000..3b89c233306db8cbce325643295e851b2b4efc6f --- /dev/null +++ 
b/testbed/django__django/tests/template_tests/relative_templates/dir1/one.html @@ -0,0 +1,3 @@ +{% extends "./../one.html" %} + +{% block content %}{{ block.super }} dir1 one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/one1.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/one1.html new file mode 100644 index 0000000000000000000000000000000000000000..9f60109975980559a76880fe8a6aef1eb044499d --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/one1.html @@ -0,0 +1,3 @@ +{% extends './../one.html' %} + +{% block content %}{{ block.super }} dir1 one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/one2.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/one2.html new file mode 100644 index 0000000000000000000000000000000000000000..1ca9f17b210b663bffbd9e81ec27287cbe47eeb7 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/one2.html @@ -0,0 +1,3 @@ +{% extends '../one.html' %} + +{% block content %}{{ block.super }} dir1 one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/one3.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/one3.html new file mode 100644 index 0000000000000000000000000000000000000000..3df6195fbbaad7083cede89c91e4d168b057fede --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/one3.html @@ -0,0 +1,3 @@ +{% extends "../one.html" %} + +{% block content %}{{ block.super }} dir1 one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/three.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/three.html new file mode 100644 index 0000000000000000000000000000000000000000..d8e3c3cb74f3cf9ab430acb0739eafa937f97841 --- /dev/null +++ 
b/testbed/django__django/tests/template_tests/relative_templates/dir1/three.html @@ -0,0 +1,3 @@ +{% extends "./dir2/../../three.html" %} + +{% block content %}{{ block.super }} dir1 three{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/dir1/two.html b/testbed/django__django/tests/template_tests/relative_templates/dir1/two.html new file mode 100644 index 0000000000000000000000000000000000000000..b6542b8a3e11ac5b4bb486a895e76d3a43a90277 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/dir1/two.html @@ -0,0 +1,3 @@ +{% extends "./dir2/one.html" %} + +{% block content %}{{ block.super }} dir1 two{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/error_extends.html b/testbed/django__django/tests/template_tests/relative_templates/error_extends.html new file mode 100644 index 0000000000000000000000000000000000000000..83e41b2999d3fc7cfeffe1e9522570e4a9d4c4da --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/error_extends.html @@ -0,0 +1,3 @@ +{% extends "./../two.html" %} + +{% block content %}{{ block.super }} one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/error_include.html b/testbed/django__django/tests/template_tests/relative_templates/error_include.html new file mode 100644 index 0000000000000000000000000000000000000000..a5efe30fbc524b7c94544330cf1d99fbb22d1425 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/error_include.html @@ -0,0 +1 @@ +{% include "./../three.html" %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/one.html b/testbed/django__django/tests/template_tests/relative_templates/one.html new file mode 100644 index 0000000000000000000000000000000000000000..9ced0ff8e47bca23d058b5a6cda6ab45d634b94d --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/one.html @@ -0,0 +1,3 
@@ +{% extends "./two.html" %} + +{% block content %}{{ block.super }} one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/one_var.html b/testbed/django__django/tests/template_tests/relative_templates/one_var.html new file mode 100644 index 0000000000000000000000000000000000000000..1d040e1fe88433b26dbc1950f91456d0d2c11b9e --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/one_var.html @@ -0,0 +1,3 @@ +{% extends tmpl %} + +{% block content %}{{ block.super }} one{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/three.html b/testbed/django__django/tests/template_tests/relative_templates/three.html new file mode 100644 index 0000000000000000000000000000000000000000..360aeeea5e88c1c9a8316eeb0e597d023c640bc7 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/three.html @@ -0,0 +1 @@ +{% block content %}three{% endblock %} diff --git a/testbed/django__django/tests/template_tests/relative_templates/two.html b/testbed/django__django/tests/template_tests/relative_templates/two.html new file mode 100644 index 0000000000000000000000000000000000000000..5fb317db9364d667dab16ae73e64ff71aa3480c5 --- /dev/null +++ b/testbed/django__django/tests/template_tests/relative_templates/two.html @@ -0,0 +1,3 @@ +{% extends "./three.html" %} + +{% block content %}{{ block.super }} two{% endblock %} diff --git a/testbed/django__django/tests/template_tests/syntax_tests/__init__.py b/testbed/django__django/tests/template_tests/syntax_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/__init__.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/testbed/django__django/tests/template_tests/syntax_tests/i18n/base.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/base.py new file mode 100644 index 0000000000000000000000000000000000000000..6197179ee425844e9acc24286797133aa5a563d5 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/base.py @@ -0,0 +1,24 @@ +import os + +from django.conf import settings +from django.test import SimpleTestCase +from django.utils.translation import activate, get_language + +here = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +pdir = os.path.split(os.path.split(os.path.abspath(here))[0])[0] +extended_locale_paths = settings.LOCALE_PATHS + [ + os.path.join(pdir, "i18n", "other", "locale"), +] + + +class MultipleLocaleActivationTestCase(SimpleTestCase): + """ + Tests for template rendering when multiple locales are activated during the + lifetime of the same process. + """ + + def setUp(self): + self._old_language = get_language() + + def tearDown(self): + activate(self._old_language) diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_blocktranslate.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_blocktranslate.py new file mode 100644 index 0000000000000000000000000000000000000000..4c9036ba76037673faf8f43d903d84433de95a87 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_blocktranslate.py @@ -0,0 +1,893 @@ +import inspect +import os +from functools import partial, wraps + +from asgiref.local import Local + +from django.template import Context, Template, TemplateSyntaxError +from django.template.base import Token, TokenType +from django.templatetags.i18n import BlockTranslateNode +from django.test import SimpleTestCase, override_settings +from django.utils import translation +from django.utils.safestring import mark_safe +from django.utils.translation import trans_real + +from ...utils import setup as base_setup +from .base import 
MultipleLocaleActivationTestCase, extended_locale_paths, here + + +def setup(templates, *args, **kwargs): + blocktranslate_setup = base_setup(templates, *args, **kwargs) + blocktrans_setup = base_setup( + { + name: template.replace("{% blocktranslate ", "{% blocktrans ").replace( + "{% endblocktranslate %}", "{% endblocktrans %}" + ) + for name, template in templates.items() + } + ) + + tags = { + "blocktrans": blocktrans_setup, + "blocktranslate": blocktranslate_setup, + } + + def decorator(func): + @wraps(func) + def inner(self, *args): + signature = inspect.signature(func) + for tag_name, setup_func in tags.items(): + if "tag_name" in signature.parameters: + setup_func(partial(func, tag_name=tag_name))(self) + else: + setup_func(func)(self) + + return inner + + return decorator + + +class I18nBlockTransTagTests(SimpleTestCase): + libraries = {"i18n": "django.templatetags.i18n"} + + @setup( + { + "i18n03": ( + "{% load i18n %}{% blocktranslate %}{{ anton }}{% endblocktranslate %}" + ) + } + ) + def test_i18n03(self): + """simple translation of a variable""" + output = self.engine.render_to_string("i18n03", {"anton": "Å"}) + self.assertEqual(output, "Å") + + @setup( + { + "i18n04": ( + "{% load i18n %}{% blocktranslate with berta=anton|lower %}{{ berta }}" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n04(self): + """simple translation of a variable and filter""" + output = self.engine.render_to_string("i18n04", {"anton": "Å"}) + self.assertEqual(output, "å") + + @setup( + { + "legacyi18n04": ( + "{% load i18n %}" + "{% blocktranslate with anton|lower as berta %}{{ berta }}" + "{% endblocktranslate %}" + ) + } + ) + def test_legacyi18n04(self): + """simple translation of a variable and filter""" + output = self.engine.render_to_string("legacyi18n04", {"anton": "Å"}) + self.assertEqual(output, "å") + + @setup( + { + "i18n05": ( + "{% load i18n %}{% blocktranslate %}xxx{{ anton }}xxx" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n05(self): + 
"""simple translation of a string with interpolation""" + output = self.engine.render_to_string("i18n05", {"anton": "yyy"}) + self.assertEqual(output, "xxxyyyxxx") + + @setup( + { + "i18n07": "{% load i18n %}" + "{% blocktranslate count counter=number %}singular{% plural %}" + "{{ counter }} plural{% endblocktranslate %}" + } + ) + def test_i18n07(self): + """translation of singular form""" + output = self.engine.render_to_string("i18n07", {"number": 1}) + self.assertEqual(output, "singular") + + @setup( + { + "legacyi18n07": "{% load i18n %}" + "{% blocktranslate count number as counter %}singular{% plural %}" + "{{ counter }} plural{% endblocktranslate %}" + } + ) + def test_legacyi18n07(self): + """translation of singular form""" + output = self.engine.render_to_string("legacyi18n07", {"number": 1}) + self.assertEqual(output, "singular") + + @setup( + { + "i18n08": "{% load i18n %}" + "{% blocktranslate count number as counter %}singular{% plural %}" + "{{ counter }} plural{% endblocktranslate %}" + } + ) + def test_i18n08(self): + """translation of plural form""" + output = self.engine.render_to_string("i18n08", {"number": 2}) + self.assertEqual(output, "2 plural") + + @setup( + { + "legacyi18n08": "{% load i18n %}" + "{% blocktranslate count counter=number %}singular{% plural %}" + "{{ counter }} plural{% endblocktranslate %}" + } + ) + def test_legacyi18n08(self): + """translation of plural form""" + output = self.engine.render_to_string("legacyi18n08", {"number": 2}) + self.assertEqual(output, "2 plural") + + @setup( + { + "i18n17": ( + "{% load i18n %}" + "{% blocktranslate with berta=anton|escape %}{{ berta }}" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n17(self): + """ + Escaping inside blocktranslate and translate works as if it was + directly in the template. 
+ """ + output = self.engine.render_to_string("i18n17", {"anton": "α & β"}) + self.assertEqual(output, "α & β") + + @setup( + { + "i18n18": ( + "{% load i18n %}" + "{% blocktranslate with berta=anton|force_escape %}{{ berta }}" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n18(self): + output = self.engine.render_to_string("i18n18", {"anton": "α & β"}) + self.assertEqual(output, "α & β") + + @setup( + { + "i18n19": ( + "{% load i18n %}{% blocktranslate %}{{ andrew }}{% endblocktranslate %}" + ) + } + ) + def test_i18n19(self): + output = self.engine.render_to_string("i18n19", {"andrew": "a & b"}) + self.assertEqual(output, "a & b") + + @setup( + { + "i18n21": ( + "{% load i18n %}{% blocktranslate %}{{ andrew }}{% endblocktranslate %}" + ) + } + ) + def test_i18n21(self): + output = self.engine.render_to_string("i18n21", {"andrew": mark_safe("a & b")}) + self.assertEqual(output, "a & b") + + @setup( + { + "legacyi18n17": ( + "{% load i18n %}" + "{% blocktranslate with anton|escape as berta %}{{ berta }}" + "{% endblocktranslate %}" + ) + } + ) + def test_legacyi18n17(self): + output = self.engine.render_to_string("legacyi18n17", {"anton": "α & β"}) + self.assertEqual(output, "α & β") + + @setup( + { + "legacyi18n18": "{% load i18n %}" + "{% blocktranslate with anton|force_escape as berta %}" + "{{ berta }}{% endblocktranslate %}" + } + ) + def test_legacyi18n18(self): + output = self.engine.render_to_string("legacyi18n18", {"anton": "α & β"}) + self.assertEqual(output, "α & β") + + @setup( + { + "i18n26": "{% load i18n %}" + "{% blocktranslate with extra_field=myextra_field count counter=number %}" + "singular {{ extra_field }}{% plural %}plural{% endblocktranslate %}" + } + ) + def test_i18n26(self): + """ + translation of plural form with extra field in singular form (#13568) + """ + output = self.engine.render_to_string( + "i18n26", {"myextra_field": "test", "number": 1} + ) + self.assertEqual(output, "singular test") + + @setup( + { + "legacyi18n26": ( + 
"{% load i18n %}" + "{% blocktranslate with myextra_field as extra_field " + "count number as counter %}singular {{ extra_field }}{% plural %}plural" + "{% endblocktranslate %}" + ) + } + ) + def test_legacyi18n26(self): + output = self.engine.render_to_string( + "legacyi18n26", {"myextra_field": "test", "number": 1} + ) + self.assertEqual(output, "singular test") + + @setup( + { + "i18n27": "{% load i18n %}{% blocktranslate count counter=number %}" + "{{ counter }} result{% plural %}{{ counter }} results" + "{% endblocktranslate %}" + } + ) + def test_i18n27(self): + """translation of singular form in Russian (#14126)""" + with translation.override("ru"): + output = self.engine.render_to_string("i18n27", {"number": 1}) + self.assertEqual( + output, "1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442" + ) + + @setup( + { + "legacyi18n27": "{% load i18n %}" + "{% blocktranslate count number as counter %}{{ counter }} result" + "{% plural %}{{ counter }} results{% endblocktranslate %}" + } + ) + def test_legacyi18n27(self): + with translation.override("ru"): + output = self.engine.render_to_string("legacyi18n27", {"number": 1}) + self.assertEqual( + output, "1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442" + ) + + @setup( + { + "i18n28": ( + "{% load i18n %}" + "{% blocktranslate with a=anton b=berta %}{{ a }} + {{ b }}" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n28(self): + """simple translation of multiple variables""" + output = self.engine.render_to_string("i18n28", {"anton": "α", "berta": "β"}) + self.assertEqual(output, "α + β") + + @setup( + { + "legacyi18n28": "{% load i18n %}" + "{% blocktranslate with anton as a and berta as b %}" + "{{ a }} + {{ b }}{% endblocktranslate %}" + } + ) + def test_legacyi18n28(self): + output = self.engine.render_to_string( + "legacyi18n28", {"anton": "α", "berta": "β"} + ) + self.assertEqual(output, "α + β") + + # blocktranslate handling of variables which are not in the context. 
+ # this should work as if blocktranslate was not there (#19915) + @setup( + { + "i18n34": ( + "{% load i18n %}{% blocktranslate %}{{ missing }}" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n34(self): + output = self.engine.render_to_string("i18n34") + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + @setup( + { + "i18n34_2": ( + "{% load i18n %}{% blocktranslate with a='α' %}{{ missing }}" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n34_2(self): + output = self.engine.render_to_string("i18n34_2") + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + @setup( + { + "i18n34_3": ( + "{% load i18n %}{% blocktranslate with a=anton %}{{ missing }}" + "{% endblocktranslate %}" + ) + } + ) + def test_i18n34_3(self): + output = self.engine.render_to_string("i18n34_3", {"anton": "\xce\xb1"}) + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + @setup( + { + "i18n37": "{% load i18n %}" + '{% translate "Page not found" as page_not_found %}' + "{% blocktranslate %}Error: {{ page_not_found }}{% endblocktranslate %}" + } + ) + def test_i18n37(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n37") + self.assertEqual(output, "Error: Seite nicht gefunden") + + # blocktranslate tag with asvar + @setup( + { + "i18n39": ( + "{% load i18n %}" + "{% blocktranslate asvar page_not_found %}Page not found" + "{% endblocktranslate %}>{{ page_not_found }}<" + ) + } + ) + def test_i18n39(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n39") + self.assertEqual(output, ">Seite nicht gefunden<") + + @setup( + { + "i18n40": "{% load i18n %}" + '{% translate "Page not found" as pg_404 %}' + "{% blocktranslate with page_not_found=pg_404 asvar output %}" + "Error: {{ page_not_found }}" + "{% endblocktranslate %}" + } + 
) + def test_i18n40(self): + output = self.engine.render_to_string("i18n40") + self.assertEqual(output, "") + + @setup( + { + "i18n41": "{% load i18n %}" + '{% translate "Page not found" as pg_404 %}' + "{% blocktranslate with page_not_found=pg_404 asvar output %}" + "Error: {{ page_not_found }}" + "{% endblocktranslate %}" + ">{{ output }}<" + } + ) + def test_i18n41(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n41") + self.assertEqual(output, ">Error: Seite nicht gefunden<") + + @setup( + { + "i18n_asvar_safestring": ( + "{% load i18n %}" + "{% blocktranslate asvar the_title %}" + "{{title}}other text" + "{% endblocktranslate %}" + "{{ the_title }}" + ) + } + ) + def test_i18n_asvar_safestring(self): + context = {"title": "
    "} + output = self.engine.render_to_string("i18n_asvar_safestring", context=context) + self.assertEqual(output, "<Main Title>other text") + + @setup( + { + "template": ( + "{% load i18n %}{% blocktranslate asvar %}Yes{% endblocktranslate %}" + ) + } + ) + def test_blocktrans_syntax_error_missing_assignment(self, tag_name): + msg = "No argument provided to the '{}' tag for the asvar option.".format( + tag_name + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": "{% load i18n %}{% blocktranslate %}%s{% endblocktranslate %}"}) + def test_blocktrans_tag_using_a_string_that_looks_like_str_fmt(self): + output = self.engine.render_to_string("template") + self.assertEqual(output, "%s") + + @setup( + { + "template": ( + "{% load i18n %}{% blocktranslate %}{% block b %} {% endblock %}" + "{% endblocktranslate %}" + ) + } + ) + def test_with_block(self, tag_name): + msg = "'{}' doesn't allow other block tags (seen 'block b') inside it".format( + tag_name + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup( + { + "template": ( + "{% load i18n %}" + "{% blocktranslate %}{% for b in [1, 2, 3] %} {% endfor %}" + "{% endblocktranslate %}" + ) + } + ) + def test_with_for(self, tag_name): + msg = ( + f"'{tag_name}' doesn't allow other block tags (seen 'for b in [1, 2, 3]') " + f"inside it" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup( + { + "template": ( + "{% load i18n %}{% blocktranslate with foo=bar with %}{{ foo }}" + "{% endblocktranslate %}" + ) + } + ) + def test_variable_twice(self): + with self.assertRaisesMessage( + TemplateSyntaxError, "The 'with' option was specified more than once" + ): + self.engine.render_to_string("template", {"foo": "bar"}) + + @setup( + {"template": "{% load i18n %}{% blocktranslate with %}{% endblocktranslate %}"} + ) + def 
test_no_args_with(self, tag_name): + msg = "\"with\" in '{}' tag needs at least one keyword argument.".format( + tag_name + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup( + { + "template": ( + "{% load i18n %}{% blocktranslate count a %}{% endblocktranslate %}" + ) + } + ) + def test_count(self, tag_name): + msg = "\"count\" in '{}' tag expected exactly one keyword argument.".format( + tag_name + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template", {"a": [1, 2, 3]}) + + @setup( + { + "template": ( + "{% load i18n %}{% blocktranslate count counter=num %}{{ counter }}" + "{% plural %}{{ counter }}{% endblocktranslate %}" + ) + } + ) + def test_count_not_number(self, tag_name): + msg = "'counter' argument to '{}' tag must be a number.".format(tag_name) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template", {"num": "1"}) + + @setup( + { + "template": ( + "{% load i18n %}{% blocktranslate count count=var|length %}" + "There is {{ count }} object. 
{% block a %} {% endblock %}" + "{% endblocktranslate %}" + ) + } + ) + def test_plural_bad_syntax(self, tag_name): + msg = "'{}' doesn't allow other block tags inside it".format(tag_name) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template", {"var": [1, 2, 3]}) + + +class TranslationBlockTranslateTagTests(SimpleTestCase): + tag_name = "blocktranslate" + + def get_template(self, template_string): + return Template( + template_string.replace( + "{{% blocktranslate ", "{{% {}".format(self.tag_name) + ).replace( + "{{% endblocktranslate %}}", "{{% end{} %}}".format(self.tag_name) + ) + ) + + @override_settings(LOCALE_PATHS=extended_locale_paths) + def test_template_tags_pgettext(self): + """{% blocktranslate %} takes message contexts into account (#14806).""" + trans_real._active = Local() + trans_real._translations = {} + with translation.override("de"): + # Nonexistent context + t = self.get_template( + '{% load i18n %}{% blocktranslate context "nonexistent" %}May' + "{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "May") + + # Existing context... 
using a literal + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate context "month name" %}May{% endblocktranslate %}' + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Mai") + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate context "verb" %}May{% endblocktranslate %}' + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Kann") + + # Using a variable + t = self.get_template( + "{% load i18n %}{% blocktranslate context message_context %}" + "May{% endblocktranslate %}" + ) + rendered = t.render(Context({"message_context": "month name"})) + self.assertEqual(rendered, "Mai") + t = self.get_template( + "{% load i18n %}{% blocktranslate context message_context %}" + "May{% endblocktranslate %}" + ) + rendered = t.render(Context({"message_context": "verb"})) + self.assertEqual(rendered, "Kann") + + # Using a filter + t = self.get_template( + "{% load i18n %}" + "{% blocktranslate context message_context|lower %}May" + "{% endblocktranslate %}" + ) + rendered = t.render(Context({"message_context": "MONTH NAME"})) + self.assertEqual(rendered, "Mai") + t = self.get_template( + "{% load i18n %}" + "{% blocktranslate context message_context|lower %}May" + "{% endblocktranslate %}" + ) + rendered = t.render(Context({"message_context": "VERB"})) + self.assertEqual(rendered, "Kann") + + # Using 'count' + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate count number=1 context "super search" %}{{ number }}' + " super result{% plural %}{{ number }} super results" + "{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "1 Super-Ergebnis") + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate count number=2 context "super search" %}{{ number }}' + " super result{% plural %}{{ number }} super results" + "{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "2 Super-Ergebnisse") + t = self.get_template( + "{% load i18n %}" + 
'{% blocktranslate context "other super search" count number=1 %}' + "{{ number }} super result{% plural %}{{ number }} super results" + "{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "1 anderen Super-Ergebnis") + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate context "other super search" count number=2 %}' + "{{ number }} super result{% plural %}{{ number }} super results" + "{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "2 andere Super-Ergebnisse") + + # Using 'with' + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate with num_comments=5 context "comment count" %}' + "There are {{ num_comments }} comments{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Es gibt 5 Kommentare") + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate with num_comments=5 context "other comment count" %}' + "There are {{ num_comments }} comments{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Andere: Es gibt 5 Kommentare") + + # Using trimmed + t = self.get_template( + "{% load i18n %}{% blocktranslate trimmed %}\n\nThere\n\t are 5 " + "\n\n comments\n{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "There are 5 comments") + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate with num_comments=5 context "comment count" trimmed ' + "%}\n\n" + "There are \t\n \t {{ num_comments }} comments\n\n" + "{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Es gibt 5 Kommentare") + t = self.get_template( + "{% load i18n %}" + '{% blocktranslate context "other super search" count number=2 trimmed ' + "%}\n{{ number }} super \n result{% plural %}{{ number }} super results" + "{% endblocktranslate %}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "2 andere Super-Ergebnisse") 
+ + # Misuses + msg = "Unknown argument for 'blocktranslate' tag: %r." + with self.assertRaisesMessage(TemplateSyntaxError, msg % 'month="May"'): + self.get_template( + '{% load i18n %}{% blocktranslate context with month="May" %}' + "{{ month }}{% endblocktranslate %}" + ) + msg = ( + '"context" in %r tag expected exactly one argument.' % "blocktranslate" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.get_template( + "{% load i18n %}{% blocktranslate context %}{% endblocktranslate %}" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.get_template( + "{% load i18n %}{% blocktranslate count number=2 context %}" + "{{ number }} super result{% plural %}{{ number }}" + " super results{% endblocktranslate %}" + ) + + @override_settings(LOCALE_PATHS=[os.path.join(here, "other", "locale")]) + def test_bad_placeholder_1(self): + """ + Error in translation file should not crash template rendering (#16516). + (%(person)s is translated as %(personne)s in fr.po). + """ + with translation.override("fr"): + t = Template( + "{% load i18n %}{% blocktranslate %}My name is {{ person }}." + "{% endblocktranslate %}" + ) + rendered = t.render(Context({"person": "James"})) + self.assertEqual(rendered, "My name is James.") + + @override_settings(LOCALE_PATHS=[os.path.join(here, "other", "locale")]) + def test_bad_placeholder_2(self): + """ + Error in translation file should not crash template rendering (#18393). + (%(person) misses a 's' in fr.po, causing the string formatting to fail) + . + """ + with translation.override("fr"): + t = Template( + "{% load i18n %}{% blocktranslate %}My other name is {{ person }}." 
+ "{% endblocktranslate %}" + ) + rendered = t.render(Context({"person": "James"})) + self.assertEqual(rendered, "My other name is James.") + + +class TranslationBlockTransnTagTests(TranslationBlockTranslateTagTests): + tag_name = "blocktrans" + + +class MultipleLocaleActivationBlockTranslateTests(MultipleLocaleActivationTestCase): + tag_name = "blocktranslate" + + def get_template(self, template_string): + return Template( + template_string.replace( + "{{% blocktranslate ", "{{% {}".format(self.tag_name) + ).replace( + "{{% endblocktranslate %}}", "{{% end{} %}}".format(self.tag_name) + ) + ) + + def test_single_locale_activation(self): + """ + Simple baseline behavior with one locale for all the supported i18n + constructs. + """ + with translation.override("fr"): + self.assertEqual( + self.get_template( + "{% load i18n %}{% blocktranslate %}Yes{% endblocktranslate %}" + ).render(Context({})), + "Oui", + ) + + def test_multiple_locale_btrans(self): + with translation.override("de"): + t = self.get_template( + "{% load i18n %}{% blocktranslate %}No{% endblocktranslate %}" + ) + with translation.override(self._old_language), translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_deactivate_btrans(self): + with translation.override("de", deactivate=True): + t = self.get_template( + "{% load i18n %}{% blocktranslate %}No{% endblocktranslate %}" + ) + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_direct_switch_btrans(self): + with translation.override("de"): + t = self.get_template( + "{% load i18n %}{% blocktranslate %}No{% endblocktranslate %}" + ) + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + +class MultipleLocaleActivationBlockTransTests( + MultipleLocaleActivationBlockTranslateTests +): + tag_name = "blocktrans" + + +class MiscTests(SimpleTestCase): + tag_name = "blocktranslate" + + def get_template(self, 
template_string): + return Template( + template_string.replace( + "{{% blocktranslate ", "{{% {}".format(self.tag_name) + ).replace( + "{{% endblocktranslate %}}", "{{% end{} %}}".format(self.tag_name) + ) + ) + + @override_settings(LOCALE_PATHS=extended_locale_paths) + def test_percent_in_translatable_block(self): + t_sing = self.get_template( + "{% load i18n %}{% blocktranslate %}The result was {{ percent }}%" + "{% endblocktranslate %}" + ) + t_plur = self.get_template( + "{% load i18n %}{% blocktranslate count num as number %}" + "{{ percent }}% represents {{ num }} object{% plural %}" + "{{ percent }}% represents {{ num }} objects{% endblocktranslate %}" + ) + with translation.override("de"): + self.assertEqual( + t_sing.render(Context({"percent": 42})), "Das Ergebnis war 42%" + ) + self.assertEqual( + t_plur.render(Context({"percent": 42, "num": 1})), + "42% stellt 1 Objekt dar", + ) + self.assertEqual( + t_plur.render(Context({"percent": 42, "num": 4})), + "42% stellt 4 Objekte dar", + ) + + @override_settings(LOCALE_PATHS=extended_locale_paths) + def test_percent_formatting_in_blocktranslate(self): + """ + Python's %-formatting is properly escaped in blocktranslate, singular, + or plural. 
+ """ + t_sing = self.get_template( + "{% load i18n %}{% blocktranslate %}There are %(num_comments)s comments" + "{% endblocktranslate %}" + ) + t_plur = self.get_template( + "{% load i18n %}{% blocktranslate count num as number %}" + "%(percent)s% represents {{ num }} object{% plural %}" + "%(percent)s% represents {{ num }} objects{% endblocktranslate %}" + ) + with translation.override("de"): + # Strings won't get translated as they don't match after escaping % + self.assertEqual( + t_sing.render(Context({"num_comments": 42})), + "There are %(num_comments)s comments", + ) + self.assertEqual( + t_plur.render(Context({"percent": 42, "num": 1})), + "%(percent)s% represents 1 object", + ) + self.assertEqual( + t_plur.render(Context({"percent": 42, "num": 4})), + "%(percent)s% represents 4 objects", + ) + + +class MiscBlockTranslationTests(MiscTests): + tag_name = "blocktrans" + + +class BlockTranslateNodeTests(SimpleTestCase): + def test_repr(self): + block_translate_node = BlockTranslateNode( + extra_context={}, + singular=[ + Token(TokenType.TEXT, "content"), + Token(TokenType.VAR, "variable"), + ], + ) + self.assertEqual( + repr(block_translate_node), + ", ] ' + "plural=None>", + ) diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_filters.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_filters.py new file mode 100644 index 0000000000000000000000000000000000000000..c78b8aca5f4390d0e44863953a514c2cbc08c246 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_filters.py @@ -0,0 +1,61 @@ +from django.test import SimpleTestCase +from django.utils import translation + +from ...utils import setup + + +class I18nFiltersTests(SimpleTestCase): + libraries = { + "custom": "template_tests.templatetags.custom", + "i18n": "django.templatetags.i18n", + } + + @setup( + { + "i18n32": '{% load i18n %}{{ "hu"|language_name }} ' + '{{ "hu"|language_name_local }} {{ "hu"|language_bidi }} ' + '{{ 
"hu"|language_name_translated }}' + } + ) + def test_i18n32(self): + output = self.engine.render_to_string("i18n32") + self.assertEqual(output, "Hungarian Magyar False Hungarian") + + with translation.override("cs"): + output = self.engine.render_to_string("i18n32") + self.assertEqual(output, "Hungarian Magyar False maďarsky") + + @setup( + { + "i18n33": "{% load i18n %}" + "{{ langcode|language_name }} {{ langcode|language_name_local }} " + "{{ langcode|language_bidi }} {{ langcode|language_name_translated }}" + } + ) + def test_i18n33(self): + output = self.engine.render_to_string("i18n33", {"langcode": "nl"}) + self.assertEqual(output, "Dutch Nederlands False Dutch") + + with translation.override("cs"): + output = self.engine.render_to_string("i18n33", {"langcode": "nl"}) + self.assertEqual(output, "Dutch Nederlands False nizozemsky") + + @setup( + { + "i18n38_2": "{% load i18n custom %}" + '{% get_language_info_list for langcodes|noop:"x y" as langs %}' + "{% for l in langs %}{{ l.code }}: {{ l.name }}/" + "{{ l.name_local }}/{{ l.name_translated }} " + "bidi={{ l.bidi }}; {% endfor %}" + } + ) + def test_i18n38_2(self): + with translation.override("cs"): + output = self.engine.render_to_string( + "i18n38_2", {"langcodes": ["it", "fr"]} + ) + self.assertEqual( + output, + "it: Italian/italiano/italsky bidi=False; " + "fr: French/français/francouzsky bidi=False; ", + ) diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_available_languages.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_available_languages.py new file mode 100644 index 0000000000000000000000000000000000000000..ce2e60c0d96371390b8c9544f5012697f732d651 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_available_languages.py @@ -0,0 +1,28 @@ +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase + +from ...utils import setup + + +class 
GetAvailableLanguagesTagTests(SimpleTestCase): + libraries = {"i18n": "django.templatetags.i18n"} + + @setup( + { + "i18n12": "{% load i18n %}" + "{% get_available_languages as langs %}{% for lang in langs %}" + '{% if lang.0 == "de" %}{{ lang.0 }}{% endif %}{% endfor %}' + } + ) + def test_i18n12(self): + output = self.engine.render_to_string("i18n12") + self.assertEqual(output, "de") + + @setup({"syntax_i18n": "{% load i18n %}{% get_available_languages a langs %}"}) + def test_no_as_var(self): + msg = ( + "'get_available_languages' requires 'as variable' (got " + "['get_available_languages', 'a', 'langs'])" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("syntax_i18n") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_current_language.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_current_language.py new file mode 100644 index 0000000000000000000000000000000000000000..7de1b53bd985bbb8235cf792aeed14e006777b75 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_current_language.py @@ -0,0 +1,17 @@ +from template_tests.utils import setup + +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase + + +class I18nGetCurrentLanguageTagTests(SimpleTestCase): + libraries = {"i18n": "django.templatetags.i18n"} + + @setup({"template": "{% load i18n %} {% get_current_language %}"}) + def test_no_as_var(self): + msg = ( + "'get_current_language' requires 'as variable' (got " + "['get_current_language'])" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_current_language_bidi.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_current_language_bidi.py new file mode 100644 index 
0000000000000000000000000000000000000000..9b5704720967333fd96489fb4a25175b26ef1c69 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_current_language_bidi.py @@ -0,0 +1,17 @@ +from template_tests.utils import setup + +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase + + +class I18nGetCurrentLanguageBidiTagTests(SimpleTestCase): + libraries = {"i18n": "django.templatetags.i18n"} + + @setup({"template": "{% load i18n %} {% get_current_language_bidi %}"}) + def test_no_as_var(self): + msg = ( + "'get_current_language_bidi' requires 'as variable' (got " + "['get_current_language_bidi'])" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_language_info.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_language_info.py new file mode 100644 index 0000000000000000000000000000000000000000..cf05df13dbf4930e0318f203955ad28ad47a10d7 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_language_info.py @@ -0,0 +1,55 @@ +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase +from django.utils import translation + +from ...utils import setup + + +class I18nGetLanguageInfoTagTests(SimpleTestCase): + libraries = { + "custom": "template_tests.templatetags.custom", + "i18n": "django.templatetags.i18n", + } + + # retrieving language information + @setup( + { + "i18n28_2": "{% load i18n %}" + '{% get_language_info for "de" as l %}' + "{{ l.code }}: {{ l.name }}/{{ l.name_local }} bidi={{ l.bidi }}" + } + ) + def test_i18n28_2(self): + output = self.engine.render_to_string("i18n28_2") + self.assertEqual(output, "de: German/Deutsch bidi=False") + + @setup( + { + "i18n29": "{% load i18n %}" + "{% get_language_info for LANGUAGE_CODE as l %}" + "{{ l.code }}: {{ l.name }}/{{ 
l.name_local }} bidi={{ l.bidi }}" + } + ) + def test_i18n29(self): + output = self.engine.render_to_string("i18n29", {"LANGUAGE_CODE": "fi"}) + self.assertEqual(output, "fi: Finnish/suomi bidi=False") + + # Test whitespace in filter arguments + @setup( + { + "i18n38": "{% load i18n custom %}" + '{% get_language_info for "de"|noop:"x y" as l %}' + "{{ l.code }}: {{ l.name }}/{{ l.name_local }}/" + "{{ l.name_translated }} bidi={{ l.bidi }}" + } + ) + def test_i18n38(self): + with translation.override("cs"): + output = self.engine.render_to_string("i18n38") + self.assertEqual(output, "de: German/Deutsch/německy bidi=False") + + @setup({"template": "{% load i18n %}{% get_language_info %}"}) + def test_no_for_as(self): + msg = "'get_language_info' requires 'for string as variable' (got [])" + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_language_info_list.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_language_info_list.py new file mode 100644 index 0000000000000000000000000000000000000000..962c888782e9d1f8f7a9363c6938d9be12e5f204 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_get_language_info_list.py @@ -0,0 +1,73 @@ +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase +from django.utils import translation + +from ...utils import setup + + +class GetLanguageInfoListTests(SimpleTestCase): + libraries = { + "custom": "template_tests.templatetags.custom", + "i18n": "django.templatetags.i18n", + } + + @setup( + { + "i18n30": "{% load i18n %}" + "{% get_language_info_list for langcodes as langs %}" + "{% for l in langs %}{{ l.code }}: {{ l.name }}/" + "{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}" + } + ) + def test_i18n30(self): + output = self.engine.render_to_string("i18n30", {"langcodes": ["it", "no"]}) + 
self.assertEqual( + output, "it: Italian/italiano bidi=False; no: Norwegian/norsk bidi=False; " + ) + + @setup( + { + "i18n31": "{% load i18n %}" + "{% get_language_info_list for langcodes as langs %}" + "{% for l in langs %}{{ l.code }}: {{ l.name }}/" + "{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}" + } + ) + def test_i18n31(self): + output = self.engine.render_to_string( + "i18n31", {"langcodes": (("sl", "Slovenian"), ("fa", "Persian"))} + ) + self.assertEqual( + output, + "sl: Slovenian/Sloven\u0161\u010dina bidi=False; " + "fa: Persian/\u0641\u0627\u0631\u0633\u06cc bidi=True; ", + ) + + @setup( + { + "i18n38_2": "{% load i18n custom %}" + '{% get_language_info_list for langcodes|noop:"x y" as langs %}' + "{% for l in langs %}{{ l.code }}: {{ l.name }}/" + "{{ l.name_local }}/{{ l.name_translated }} " + "bidi={{ l.bidi }}; {% endfor %}" + } + ) + def test_i18n38_2(self): + with translation.override("cs"): + output = self.engine.render_to_string( + "i18n38_2", {"langcodes": ["it", "fr"]} + ) + self.assertEqual( + output, + "it: Italian/italiano/italsky bidi=False; " + "fr: French/français/francouzsky bidi=False; ", + ) + + @setup({"i18n_syntax": "{% load i18n %} {% get_language_info_list error %}"}) + def test_no_for_as(self): + msg = ( + "'get_language_info_list' requires 'for sequence as variable' (got " + "['error'])" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("i18n_syntax") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_language.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_language.py new file mode 100644 index 0000000000000000000000000000000000000000..57a3a538665c58f3e6c819da09c7a884d6c12a8e --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_language.py @@ -0,0 +1,15 @@ +from template_tests.utils import setup + +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase + + 
+class I18nLanguageTagTests(SimpleTestCase): + libraries = {"i18n": "django.templatetags.i18n"} + + @setup({"i18n_language": "{% load i18n %} {% language %} {% endlanguage %}"}) + def test_no_arg(self): + with self.assertRaisesMessage( + TemplateSyntaxError, "'language' takes one argument (language)" + ): + self.engine.render_to_string("i18n_language") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_translate.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_translate.py new file mode 100644 index 0000000000000000000000000000000000000000..d8a224df9da4b68c279f6a7460e3afbdcf085d01 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_translate.py @@ -0,0 +1,316 @@ +import inspect +from functools import partial, wraps + +from asgiref.local import Local + +from django.template import Context, Template, TemplateSyntaxError +from django.templatetags.l10n import LocalizeNode +from django.test import SimpleTestCase, override_settings +from django.utils import translation +from django.utils.safestring import mark_safe +from django.utils.translation import trans_real + +from ...utils import setup as base_setup +from .base import MultipleLocaleActivationTestCase, extended_locale_paths + + +def setup(templates, *args, **kwargs): + translate_setup = base_setup(templates, *args, **kwargs) + trans_setup = base_setup( + { + name: template.replace("{% translate ", "{% trans ") + for name, template in templates.items() + } + ) + + tags = { + "trans": trans_setup, + "translate": translate_setup, + } + + def decorator(func): + @wraps(func) + def inner(self, *args): + signature = inspect.signature(func) + for tag_name, setup_func in tags.items(): + if "tag_name" in signature.parameters: + setup_func(partial(func, tag_name=tag_name))(self) + else: + setup_func(func)(self) + + return inner + + return decorator + + +class I18nTransTagTests(SimpleTestCase): + libraries = {"i18n": 
"django.templatetags.i18n"} + + @setup({"i18n01": "{% load i18n %}{% translate 'xxxyyyxxx' %}"}) + def test_i18n01(self): + """simple translation of a string delimited by '.""" + output = self.engine.render_to_string("i18n01") + self.assertEqual(output, "xxxyyyxxx") + + @setup({"i18n02": '{% load i18n %}{% translate "xxxyyyxxx" %}'}) + def test_i18n02(self): + """simple translation of a string delimited by ".""" + output = self.engine.render_to_string("i18n02") + self.assertEqual(output, "xxxyyyxxx") + + @setup({"i18n06": '{% load i18n %}{% translate "Page not found" %}'}) + def test_i18n06(self): + """simple translation of a string to German""" + with translation.override("de"): + output = self.engine.render_to_string("i18n06") + self.assertEqual(output, "Seite nicht gefunden") + + @setup({"i18n09": '{% load i18n %}{% translate "Page not found" noop %}'}) + def test_i18n09(self): + """simple non-translation (only marking) of a string to German""" + with translation.override("de"): + output = self.engine.render_to_string("i18n09") + self.assertEqual(output, "Page not found") + + @setup({"i18n20": "{% load i18n %}{% translate andrew %}"}) + def test_i18n20(self): + output = self.engine.render_to_string("i18n20", {"andrew": "a & b"}) + self.assertEqual(output, "a & b") + + @setup({"i18n22": "{% load i18n %}{% translate andrew %}"}) + def test_i18n22(self): + output = self.engine.render_to_string("i18n22", {"andrew": mark_safe("a & b")}) + self.assertEqual(output, "a & b") + + @setup( + { + "i18n23": ( + '{% load i18n %}{% translate "Page not found"|capfirst|slice:"6:" %}' + ) + } + ) + def test_i18n23(self): + """Using filters with the {% translate %} tag (#5972).""" + with translation.override("de"): + output = self.engine.render_to_string("i18n23") + self.assertEqual(output, "nicht gefunden") + + @setup({"i18n24": "{% load i18n %}{% translate 'Page not found'|upper %}"}) + def test_i18n24(self): + with translation.override("de"): + output = 
self.engine.render_to_string("i18n24") + self.assertEqual(output, "SEITE NICHT GEFUNDEN") + + @setup({"i18n25": "{% load i18n %}{% translate somevar|upper %}"}) + def test_i18n25(self): + with translation.override("de"): + output = self.engine.render_to_string( + "i18n25", {"somevar": "Page not found"} + ) + self.assertEqual(output, "SEITE NICHT GEFUNDEN") + + # trans tag with as var + @setup( + { + "i18n35": ( + '{% load i18n %}{% translate "Page not found" as page_not_found %}' + "{{ page_not_found }}" + ) + } + ) + def test_i18n35(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n35") + self.assertEqual(output, "Seite nicht gefunden") + + @setup( + { + "i18n36": ( + '{% load i18n %}{% translate "Page not found" noop as page_not_found %}' + "{{ page_not_found }}" + ) + } + ) + def test_i18n36(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n36") + self.assertEqual(output, "Page not found") + + @setup({"template": "{% load i18n %}{% translate %}A}"}) + def test_syntax_error_no_arguments(self, tag_name): + msg = "'{}' takes at least one argument".format(tag_name) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": '{% load i18n %}{% translate "Yes" badoption %}'}) + def test_syntax_error_bad_option(self, tag_name): + msg = "Unknown argument for '{}' tag: 'badoption'".format(tag_name) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": '{% load i18n %}{% translate "Yes" as %}'}) + def test_syntax_error_missing_assignment(self, tag_name): + msg = "No argument provided to the '{}' tag for the as option.".format(tag_name) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": '{% load i18n %}{% translate "Yes" as var context %}'}) + def test_syntax_error_missing_context(self, 
tag_name): + msg = "No argument provided to the '{}' tag for the context option.".format( + tag_name + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": '{% load i18n %}{% translate "Yes" context as var %}'}) + def test_syntax_error_context_as(self, tag_name): + msg = ( + f"Invalid argument 'as' provided to the '{tag_name}' tag for the context " + f"option" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": '{% load i18n %}{% translate "Yes" context noop %}'}) + def test_syntax_error_context_noop(self, tag_name): + msg = ( + f"Invalid argument 'noop' provided to the '{tag_name}' tag for the context " + f"option" + ) + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": '{% load i18n %}{% translate "Yes" noop noop %}'}) + def test_syntax_error_duplicate_option(self): + msg = "The 'noop' option was specified more than once." + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": '{% load i18n %}{% translate "%s" %}'}) + def test_trans_tag_using_a_string_that_looks_like_str_fmt(self): + output = self.engine.render_to_string("template") + self.assertEqual(output, "%s") + + +class TranslationTransTagTests(SimpleTestCase): + tag_name = "trans" + + def get_template(self, template_string): + return Template( + template_string.replace("{{% translate ", "{{% {}".format(self.tag_name)) + ) + + @override_settings(LOCALE_PATHS=extended_locale_paths) + def test_template_tags_pgettext(self): + """{% translate %} takes message contexts into account (#14806).""" + trans_real._active = Local() + trans_real._translations = {} + with translation.override("de"): + # Nonexistent context... 
+ t = self.get_template( + '{% load i18n %}{% translate "May" context "nonexistent" %}' + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "May") + + # Existing context... using a literal + t = self.get_template( + '{% load i18n %}{% translate "May" context "month name" %}' + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Mai") + t = self.get_template('{% load i18n %}{% translate "May" context "verb" %}') + rendered = t.render(Context()) + self.assertEqual(rendered, "Kann") + + # Using a variable + t = self.get_template( + '{% load i18n %}{% translate "May" context message_context %}' + ) + rendered = t.render(Context({"message_context": "month name"})) + self.assertEqual(rendered, "Mai") + t = self.get_template( + '{% load i18n %}{% translate "May" context message_context %}' + ) + rendered = t.render(Context({"message_context": "verb"})) + self.assertEqual(rendered, "Kann") + + # Using a filter + t = self.get_template( + '{% load i18n %}{% translate "May" context message_context|lower %}' + ) + rendered = t.render(Context({"message_context": "MONTH NAME"})) + self.assertEqual(rendered, "Mai") + t = self.get_template( + '{% load i18n %}{% translate "May" context message_context|lower %}' + ) + rendered = t.render(Context({"message_context": "VERB"})) + self.assertEqual(rendered, "Kann") + + # Using 'as' + t = self.get_template( + '{% load i18n %}{% translate "May" context "month name" as var %}' + "Value: {{ var }}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Value: Mai") + t = self.get_template( + '{% load i18n %}{% translate "May" as var context "verb" %}Value: ' + "{{ var }}" + ) + rendered = t.render(Context()) + self.assertEqual(rendered, "Value: Kann") + + +class TranslationTranslateTagTests(TranslationTransTagTests): + tag_name = "translate" + + +class MultipleLocaleActivationTransTagTests(MultipleLocaleActivationTestCase): + tag_name = "trans" + + def get_template(self, template_string): + return 
Template( + template_string.replace("{{% translate ", "{{% {}".format(self.tag_name)) + ) + + def test_single_locale_activation(self): + """ + Simple baseline behavior with one locale for all the supported i18n + constructs. + """ + with translation.override("fr"): + self.assertEqual( + self.get_template("{% load i18n %}{% translate 'Yes' %}").render( + Context({}) + ), + "Oui", + ) + + def test_multiple_locale_trans(self): + with translation.override("de"): + t = self.get_template("{% load i18n %}{% translate 'No' %}") + with translation.override(self._old_language), translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_deactivate_trans(self): + with translation.override("de", deactivate=True): + t = self.get_template("{% load i18n %}{% translate 'No' %}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_direct_switch_trans(self): + with translation.override("de"): + t = self.get_template("{% load i18n %}{% translate 'No' %}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + +class MultipleLocaleActivationTranslateTagTests(MultipleLocaleActivationTransTagTests): + tag_name = "translate" + + +class LocalizeNodeTests(SimpleTestCase): + def test_repr(self): + node = LocalizeNode(nodelist=[], use_l10n=True) + self.assertEqual(repr(node), "") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_underscore_syntax.py b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_underscore_syntax.py new file mode 100644 index 0000000000000000000000000000000000000000..782ce015d89b36bd1fffd88db2d6ecc4a564a800 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/i18n/test_underscore_syntax.py @@ -0,0 +1,113 @@ +from django.template import Context, Template +from django.test import SimpleTestCase +from django.utils import translation + +from ...utils import setup 
+from .base import MultipleLocaleActivationTestCase + + +class MultipleLocaleActivationTests(MultipleLocaleActivationTestCase): + def test_single_locale_activation(self): + """ + Simple baseline behavior with one locale for all the supported i18n + constructs. + """ + with translation.override("fr"): + self.assertEqual(Template("{{ _('Yes') }}").render(Context({})), "Oui") + + # Literal marked up with _() in a filter expression + + def test_multiple_locale_filter(self): + with translation.override("de"): + t = Template("{% load i18n %}{{ 0|yesno:_('yes,no,maybe') }}") + with translation.override(self._old_language), translation.override("nl"): + self.assertEqual(t.render(Context({})), "nee") + + def test_multiple_locale_filter_deactivate(self): + with translation.override("de", deactivate=True): + t = Template("{% load i18n %}{{ 0|yesno:_('yes,no,maybe') }}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "nee") + + def test_multiple_locale_filter_direct_switch(self): + with translation.override("de"): + t = Template("{% load i18n %}{{ 0|yesno:_('yes,no,maybe') }}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "nee") + + # Literal marked up with _() + + def test_multiple_locale(self): + with translation.override("de"): + t = Template("{{ _('No') }}") + with translation.override(self._old_language), translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_deactivate(self): + with translation.override("de", deactivate=True): + t = Template("{{ _('No') }}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_direct_switch(self): + with translation.override("de"): + t = Template("{{ _('No') }}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + # Literal marked up with _(), loading the i18n template tag library + + def test_multiple_locale_loadi18n(self): + with 
translation.override("de"): + t = Template("{% load i18n %}{{ _('No') }}") + with translation.override(self._old_language), translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_loadi18n_deactivate(self): + with translation.override("de", deactivate=True): + t = Template("{% load i18n %}{{ _('No') }}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + def test_multiple_locale_loadi18n_direct_switch(self): + with translation.override("de"): + t = Template("{% load i18n %}{{ _('No') }}") + with translation.override("nl"): + self.assertEqual(t.render(Context({})), "Nee") + + +class I18nStringLiteralTests(SimpleTestCase): + """translation of constant strings""" + + libraries = {"i18n": "django.templatetags.i18n"} + + @setup({"i18n13": '{{ _("Password") }}'}) + def test_i18n13(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n13") + self.assertEqual(output, "Passwort") + + @setup( + { + "i18n14": ( + '{% cycle "foo" _("Password") _(\'Password\') as c %} {% cycle c %} ' + "{% cycle c %}" + ) + } + ) + def test_i18n14(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n14") + self.assertEqual(output, "foo Passwort Passwort") + + @setup({"i18n15": '{{ absent|default:_("Password") }}'}) + def test_i18n15(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n15", {"absent": ""}) + self.assertEqual(output, "Passwort") + + @setup({"i18n16": '{{ _("<") }}'}) + def test_i18n16(self): + with translation.override("de"): + output = self.engine.render_to_string("i18n16") + self.assertEqual(output, "<") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/test_autoescape.py b/testbed/django__django/tests/template_tests/syntax_tests/test_autoescape.py new file mode 100644 index 0000000000000000000000000000000000000000..a45b850fcfe5ed2121934f1cb6f61075d5764096 --- /dev/null +++ 
b/testbed/django__django/tests/template_tests/syntax_tests/test_autoescape.py @@ -0,0 +1,174 @@ +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase +from django.utils.safestring import mark_safe + +from ..utils import SafeClass, UnsafeClass, setup + + +class AutoescapeTagTests(SimpleTestCase): + @setup({"autoescape-tag01": "{% autoescape off %}hello{% endautoescape %}"}) + def test_autoescape_tag01(self): + output = self.engine.render_to_string("autoescape-tag01") + self.assertEqual(output, "hello") + + @setup({"autoescape-tag02": "{% autoescape off %}{{ first }}{% endautoescape %}"}) + def test_autoescape_tag02(self): + output = self.engine.render_to_string( + "autoescape-tag02", {"first": "hello"} + ) + self.assertEqual(output, "hello") + + @setup({"autoescape-tag03": "{% autoescape on %}{{ first }}{% endautoescape %}"}) + def test_autoescape_tag03(self): + output = self.engine.render_to_string( + "autoescape-tag03", {"first": "hello"} + ) + self.assertEqual(output, "<b>hello</b>") + + # Autoescape disabling and enabling nest in a predictable way. 
+ @setup( + { + "autoescape-tag04": ( + "{% autoescape off %}{{ first }} {% autoescape on %}{{ first }}" + "{% endautoescape %}{% endautoescape %}" + ) + } + ) + def test_autoescape_tag04(self): + output = self.engine.render_to_string("autoescape-tag04", {"first": ""}) + self.assertEqual(output, " <a>") + + @setup({"autoescape-tag05": "{% autoescape on %}{{ first }}{% endautoescape %}"}) + def test_autoescape_tag05(self): + output = self.engine.render_to_string( + "autoescape-tag05", {"first": "first"} + ) + self.assertEqual(output, "<b>first</b>") + + # Strings (ASCII or Unicode) already marked as "safe" are not + # auto-escaped + @setup({"autoescape-tag06": "{{ first }}"}) + def test_autoescape_tag06(self): + output = self.engine.render_to_string( + "autoescape-tag06", {"first": mark_safe("first")} + ) + self.assertEqual(output, "first") + + @setup({"autoescape-tag07": "{% autoescape on %}{{ first }}{% endautoescape %}"}) + def test_autoescape_tag07(self): + output = self.engine.render_to_string( + "autoescape-tag07", {"first": mark_safe("Apple")} + ) + self.assertEqual(output, "Apple") + + @setup( + { + "autoescape-tag08": ( + r'{% autoescape on %}{{ var|default_if_none:" endquote\" hah" }}' + r"{% endautoescape %}" + ) + } + ) + def test_autoescape_tag08(self): + """ + Literal string arguments to filters, if used in the result, are safe. + """ + output = self.engine.render_to_string("autoescape-tag08", {"var": None}) + self.assertEqual(output, ' endquote" hah') + + # Objects which return safe strings as their __str__ method + # won't get double-escaped. 
+ @setup({"autoescape-tag09": r"{{ unsafe }}"}) + def test_autoescape_tag09(self): + output = self.engine.render_to_string( + "autoescape-tag09", {"unsafe": UnsafeClass()} + ) + self.assertEqual(output, "you & me") + + @setup({"autoescape-tag10": r"{{ safe }}"}) + def test_autoescape_tag10(self): + output = self.engine.render_to_string("autoescape-tag10", {"safe": SafeClass()}) + self.assertEqual(output, "you > me") + + @setup( + { + "autoescape-filtertag01": ( + "{{ first }}{% filter safe %}{{ first }} x"}) + + # Arguments to filters are 'safe' and manipulate their input unescaped. + @setup({"autoescape-filters01": '{{ var|cut:"&" }}'}) + def test_autoescape_filters01(self): + output = self.engine.render_to_string( + "autoescape-filters01", {"var": "this & that"} + ) + self.assertEqual(output, "this that") + + @setup({"autoescape-filters02": '{{ var|join:" & " }}'}) + def test_autoescape_filters02(self): + output = self.engine.render_to_string( + "autoescape-filters02", {"var": ("Tom", "Dick", "Harry")} + ) + self.assertEqual(output, "Tom & Dick & Harry") + + @setup({"autoescape-literals01": '{{ "this & that" }}'}) + def test_autoescape_literals01(self): + """ + Literal strings are safe. + """ + output = self.engine.render_to_string("autoescape-literals01") + self.assertEqual(output, "this & that") + + @setup({"autoescape-stringiterations01": "{% for l in var %}{{ l }},{% endfor %}"}) + def test_autoescape_stringiterations01(self): + """ + Iterating over strings outputs safe characters. + """ + output = self.engine.render_to_string( + "autoescape-stringiterations01", {"var": "K&R"} + ) + self.assertEqual(output, "K,&,R,") + + @setup({"autoescape-lookup01": "{{ var.key }}"}) + def test_autoescape_lookup01(self): + """ + Escape requirement survives lookup. 
+ """ + output = self.engine.render_to_string( + "autoescape-lookup01", {"var": {"key": "this & that"}} + ) + self.assertEqual(output, "this & that") + + @setup( + { + "autoescape-incorrect-arg": ( + "{% autoescape true %}{{ var.key }}{% endautoescape %}" + ) + } + ) + def test_invalid_arg(self): + msg = "'autoescape' argument should be 'on' or 'off'" + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string( + "autoescape-incorrect-arg", {"var": {"key": "this & that"}} + ) + + @setup( + {"autoescape-incorrect-arg": "{% autoescape %}{{ var.key }}{% endautoescape %}"} + ) + def test_no_arg(self): + msg = "'autoescape' tag requires exactly one argument." + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string( + "autoescape-incorrect-arg", {"var": {"key": "this & that"}} + ) diff --git a/testbed/django__django/tests/template_tests/syntax_tests/test_basic.py b/testbed/django__django/tests/template_tests/syntax_tests/test_basic.py new file mode 100644 index 0000000000000000000000000000000000000000..20bf30d55cc533b50b783bc597a3c9362319fbd2 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/test_basic.py @@ -0,0 +1,358 @@ +from django.template.base import TemplateSyntaxError +from django.template.context import Context +from django.template.loader_tags import BlockContext, BlockNode +from django.test import SimpleTestCase + +from ..utils import SilentAttrClass, SilentGetItemClass, SomeClass, setup + +basic_templates = { + "basic-syntax01": "something cool", + "basic-syntax02": "{{ headline }}", + "basic-syntax03": "{{ first }} --- {{ second }}", +} + + +class BasicSyntaxTests(SimpleTestCase): + @setup(basic_templates) + def test_basic_syntax01(self): + """ + Plain text should go through the template parser untouched. 
+ """ + output = self.engine.render_to_string("basic-syntax01") + self.assertEqual(output, "something cool") + + @setup(basic_templates) + def test_basic_syntax02(self): + """ + Variables should be replaced with their value in the current + context + """ + output = self.engine.render_to_string("basic-syntax02", {"headline": "Success"}) + self.assertEqual(output, "Success") + + @setup(basic_templates) + def test_basic_syntax03(self): + """ + More than one replacement variable is allowed in a template + """ + output = self.engine.render_to_string( + "basic-syntax03", {"first": 1, "second": 2} + ) + self.assertEqual(output, "1 --- 2") + + @setup({"basic-syntax04": "as{{ missing }}df"}) + def test_basic_syntax04(self): + """ + Fail silently when a variable is not found in the current context + """ + output = self.engine.render_to_string("basic-syntax04") + if self.engine.string_if_invalid: + self.assertEqual(output, "asINVALIDdf") + else: + self.assertEqual(output, "asdf") + + @setup({"basic-syntax06": "{{ multi word variable }}"}) + def test_basic_syntax06(self): + """ + A variable may not contain more than one word + """ + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax06") + + @setup({"basic-syntax07": "{{ }}"}) + def test_basic_syntax07(self): + """ + Raise TemplateSyntaxError for empty variable tags. + """ + with self.assertRaisesMessage( + TemplateSyntaxError, "Empty variable tag on line 1" + ): + self.engine.get_template("basic-syntax07") + + @setup({"basic-syntax08": "{{ }}"}) + def test_basic_syntax08(self): + """ + Raise TemplateSyntaxError for empty variable tags. 
+ """ + with self.assertRaisesMessage( + TemplateSyntaxError, "Empty variable tag on line 1" + ): + self.engine.get_template("basic-syntax08") + + @setup({"basic-syntax09": "{{ var.method }}"}) + def test_basic_syntax09(self): + """ + Attribute syntax allows a template to call an object's attribute + """ + output = self.engine.render_to_string("basic-syntax09", {"var": SomeClass()}) + self.assertEqual(output, "SomeClass.method") + + @setup({"basic-syntax10": "{{ var.otherclass.method }}"}) + def test_basic_syntax10(self): + """ + Multiple levels of attribute access are allowed. + """ + output = self.engine.render_to_string("basic-syntax10", {"var": SomeClass()}) + self.assertEqual(output, "OtherClass.method") + + @setup({"basic-syntax11": "{{ var.blech }}"}) + def test_basic_syntax11(self): + """ + Fail silently when a variable's attribute isn't found. + """ + output = self.engine.render_to_string("basic-syntax11", {"var": SomeClass()}) + + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + @setup({"basic-syntax12": "{{ var.__dict__ }}"}) + def test_basic_syntax12(self): + """ + Raise TemplateSyntaxError when trying to access a variable + beginning with an underscore. + """ + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax12") + + # Raise TemplateSyntaxError when trying to access a variable + # containing an illegal character. 
+ @setup({"basic-syntax13": "{{ va>r }}"}) + def test_basic_syntax13(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax13") + + @setup({"basic-syntax14": "{{ (var.r) }}"}) + def test_basic_syntax14(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax14") + + @setup({"basic-syntax15": "{{ sp%am }}"}) + def test_basic_syntax15(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax15") + + @setup({"basic-syntax16": "{{ eggs! }}"}) + def test_basic_syntax16(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax16") + + @setup({"basic-syntax17": "{{ moo? }}"}) + def test_basic_syntax17(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax17") + + @setup({"basic-syntax18": "{{ foo.bar }}"}) + def test_basic_syntax18(self): + """ + Attribute syntax allows a template to call a dictionary key's + value. + """ + output = self.engine.render_to_string("basic-syntax18", {"foo": {"bar": "baz"}}) + self.assertEqual(output, "baz") + + @setup({"basic-syntax19": "{{ foo.spam }}"}) + def test_basic_syntax19(self): + """ + Fail silently when a variable's dictionary key isn't found. + """ + output = self.engine.render_to_string("basic-syntax19", {"foo": {"bar": "baz"}}) + + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + @setup({"basic-syntax20": "{{ var.method2 }}"}) + def test_basic_syntax20(self): + """ + Fail silently when accessing a non-simple method + """ + output = self.engine.render_to_string("basic-syntax20", {"var": SomeClass()}) + + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + @setup({"basic-syntax20b": "{{ var.method5 }}"}) + def test_basic_syntax20b(self): + """ + Don't silence a TypeError if it was raised inside a callable. 
+ """ + template = self.engine.get_template("basic-syntax20b") + + with self.assertRaises(TypeError): + template.render(Context({"var": SomeClass()})) + + # Don't get confused when parsing something that is almost, but not + # quite, a template tag. + @setup({"basic-syntax21": "a {{ moo %} b"}) + def test_basic_syntax21(self): + output = self.engine.render_to_string("basic-syntax21") + self.assertEqual(output, "a {{ moo %} b") + + @setup({"basic-syntax22": "{{ moo #}"}) + def test_basic_syntax22(self): + output = self.engine.render_to_string("basic-syntax22") + self.assertEqual(output, "{{ moo #}") + + @setup({"basic-syntax23": "{{ moo #} {{ cow }}"}) + def test_basic_syntax23(self): + """ + Treat "moo #} {{ cow" as the variable. Not ideal, but costly to work + around, so this triggers an error. + """ + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("basic-syntax23") + + @setup({"basic-syntax24": "{{ moo\n }}"}) + def test_basic_syntax24(self): + """ + Embedded newlines make it not-a-tag. + """ + output = self.engine.render_to_string("basic-syntax24") + self.assertEqual(output, "{{ moo\n }}") + + # Literal strings are permitted inside variables, mostly for i18n + # purposes. + @setup({"basic-syntax25": '{{ "fred" }}'}) + def test_basic_syntax25(self): + output = self.engine.render_to_string("basic-syntax25") + self.assertEqual(output, "fred") + + @setup({"basic-syntax26": r'{{ "\"fred\"" }}'}) + def test_basic_syntax26(self): + output = self.engine.render_to_string("basic-syntax26") + self.assertEqual(output, '"fred"') + + @setup({"basic-syntax27": r'{{ _("\"fred\"") }}'}) + def test_basic_syntax27(self): + output = self.engine.render_to_string("basic-syntax27") + self.assertEqual(output, '"fred"') + + # #12554 -- Make sure a silent_variable_failure Exception is + # suppressed on dictionary and attribute lookup. 
+ @setup({"basic-syntax28": "{{ a.b }}"}) + def test_basic_syntax28(self): + output = self.engine.render_to_string( + "basic-syntax28", {"a": SilentGetItemClass()} + ) + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + @setup({"basic-syntax29": "{{ a.b }}"}) + def test_basic_syntax29(self): + output = self.engine.render_to_string( + "basic-syntax29", {"a": SilentAttrClass()} + ) + if self.engine.string_if_invalid: + self.assertEqual(output, "INVALID") + else: + self.assertEqual(output, "") + + # Something that starts like a number but has an extra lookup works + # as a lookup. + @setup({"basic-syntax30": "{{ 1.2.3 }}"}) + def test_basic_syntax30(self): + output = self.engine.render_to_string( + "basic-syntax30", {"1": {"2": {"3": "d"}}} + ) + self.assertEqual(output, "d") + + @setup({"basic-syntax31": "{{ 1.2.3 }}"}) + def test_basic_syntax31(self): + output = self.engine.render_to_string( + "basic-syntax31", + {"1": {"2": ("a", "b", "c", "d")}}, + ) + self.assertEqual(output, "d") + + @setup({"basic-syntax32": "{{ 1.2.3 }}"}) + def test_basic_syntax32(self): + output = self.engine.render_to_string( + "basic-syntax32", + {"1": (("x", "x", "x", "x"), ("y", "y", "y", "y"), ("a", "b", "c", "d"))}, + ) + self.assertEqual(output, "d") + + @setup({"basic-syntax33": "{{ 1.2.3 }}"}) + def test_basic_syntax33(self): + output = self.engine.render_to_string( + "basic-syntax33", + {"1": ("xxxx", "yyyy", "abcd")}, + ) + self.assertEqual(output, "d") + + @setup({"basic-syntax34": "{{ 1.2.3 }}"}) + def test_basic_syntax34(self): + output = self.engine.render_to_string( + "basic-syntax34", {"1": ({"x": "x"}, {"y": "y"}, {"z": "z", "3": "d"})} + ) + self.assertEqual(output, "d") + + # Numbers are numbers even if their digits are in the context. 
+ @setup({"basic-syntax35": "{{ 1 }}"}) + def test_basic_syntax35(self): + output = self.engine.render_to_string("basic-syntax35", {"1": "abc"}) + self.assertEqual(output, "1") + + @setup({"basic-syntax36": "{{ 1.2 }}"}) + def test_basic_syntax36(self): + output = self.engine.render_to_string("basic-syntax36", {"1": "abc"}) + self.assertEqual(output, "1.2") + + @setup({"basic-syntax37": "{{ callable }}"}) + def test_basic_syntax37(self): + """ + Call methods in the top level of the context. + """ + output = self.engine.render_to_string( + "basic-syntax37", {"callable": lambda: "foo bar"} + ) + self.assertEqual(output, "foo bar") + + @setup({"basic-syntax38": "{{ var.callable }}"}) + def test_basic_syntax38(self): + """ + Call methods returned from dictionary lookups. + """ + output = self.engine.render_to_string( + "basic-syntax38", {"var": {"callable": lambda: "foo bar"}} + ) + self.assertEqual(output, "foo bar") + + @setup({"template": "{% block content %}"}) + def test_unclosed_block(self): + msg = "Unclosed tag on line 1: 'block'. Looking for one of: endblock." + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"template": "{% if a %}"}) + def test_unclosed_block2(self): + msg = "Unclosed tag on line 1: 'if'. Looking for one of: elif, else, endif." 
+ with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.render_to_string("template") + + @setup({"tpl-str": "%s", "tpl-percent": "%%", "tpl-weird-percent": "% %s"}) + def test_ignores_strings_that_look_like_format_interpolation(self): + output = self.engine.render_to_string("tpl-str") + self.assertEqual(output, "%s") + output = self.engine.render_to_string("tpl-percent") + self.assertEqual(output, "%%") + output = self.engine.render_to_string("tpl-weird-percent") + self.assertEqual(output, "% %s") + + +class BlockContextTests(SimpleTestCase): + def test_repr(self): + block_context = BlockContext() + block_context.add_blocks({"content": BlockNode("content", [])}) + self.assertEqual( + repr(block_context), + ", " + "{'content': []})>", + ) diff --git a/testbed/django__django/tests/template_tests/syntax_tests/test_builtins.py b/testbed/django__django/tests/template_tests/syntax_tests/test_builtins.py new file mode 100644 index 0000000000000000000000000000000000000000..6e81410556bb57bf66fdc1a40d87bac7557a3604 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/test_builtins.py @@ -0,0 +1,20 @@ +from django.test import SimpleTestCase + +from ..utils import setup + + +class BuiltinsTests(SimpleTestCase): + @setup({"builtins01": "{{ True }}"}) + def test_builtins01(self): + output = self.engine.render_to_string("builtins01") + self.assertEqual(output, "True") + + @setup({"builtins02": "{{ False }}"}) + def test_builtins02(self): + output = self.engine.render_to_string("builtins02") + self.assertEqual(output, "False") + + @setup({"builtins03": "{{ None }}"}) + def test_builtins03(self): + output = self.engine.render_to_string("builtins03") + self.assertEqual(output, "None") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/test_cache.py b/testbed/django__django/tests/template_tests/syntax_tests/test_cache.py new file mode 100644 index 
0000000000000000000000000000000000000000..9dcf9a6b5a2ec0253299e7d187023b38e2ad33d2 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/test_cache.py @@ -0,0 +1,218 @@ +from django.core.cache import cache +from django.template import Context, Engine, TemplateSyntaxError +from django.test import SimpleTestCase, override_settings + +from ..utils import setup + + +class CacheTagTests(SimpleTestCase): + libraries = { + "cache": "django.templatetags.cache", + "custom": "template_tests.templatetags.custom", + } + + def tearDown(self): + cache.clear() + + @setup({"cache03": "{% load cache %}{% cache 2 test %}cache03{% endcache %}"}) + def test_cache03(self): + output = self.engine.render_to_string("cache03") + self.assertEqual(output, "cache03") + + @setup( + { + "cache03": "{% load cache %}{% cache 2 test %}cache03{% endcache %}", + "cache04": "{% load cache %}{% cache 2 test %}cache04{% endcache %}", + } + ) + def test_cache04(self): + self.engine.render_to_string("cache03") + output = self.engine.render_to_string("cache04") + self.assertEqual(output, "cache03") + + @setup({"cache05": "{% load cache %}{% cache 2 test foo %}cache05{% endcache %}"}) + def test_cache05(self): + output = self.engine.render_to_string("cache05", {"foo": 1}) + self.assertEqual(output, "cache05") + + @setup({"cache06": "{% load cache %}{% cache 2 test foo %}cache06{% endcache %}"}) + def test_cache06(self): + output = self.engine.render_to_string("cache06", {"foo": 2}) + self.assertEqual(output, "cache06") + + @setup( + { + "cache05": "{% load cache %}{% cache 2 test foo %}cache05{% endcache %}", + "cache07": "{% load cache %}{% cache 2 test foo %}cache07{% endcache %}", + } + ) + def test_cache07(self): + context = {"foo": 1} + self.engine.render_to_string("cache05", context) + output = self.engine.render_to_string("cache07", context) + self.assertEqual(output, "cache05") + + @setup( + { + "cache06": "{% load cache %}{% cache 2 test foo %}cache06{% endcache %}", + 
"cache08": "{% load cache %}{% cache time test foo %}cache08{% endcache %}", + } + ) + def test_cache08(self): + """ + Allow first argument to be a variable. + """ + context = {"foo": 2, "time": 2} + self.engine.render_to_string("cache06", context) + output = self.engine.render_to_string("cache08", context) + self.assertEqual(output, "cache06") + + # Raise exception if we don't have at least 2 args, first one integer. + @setup({"cache11": "{% load cache %}{% cache %}{% endcache %}"}) + def test_cache11(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("cache11") + + @setup({"cache12": "{% load cache %}{% cache 1 %}{% endcache %}"}) + def test_cache12(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.get_template("cache12") + + @setup({"cache13": "{% load cache %}{% cache foo bar %}{% endcache %}"}) + def test_cache13(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.render_to_string("cache13") + + @setup({"cache14": "{% load cache %}{% cache foo bar %}{% endcache %}"}) + def test_cache14(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.render_to_string("cache14", {"foo": "fail"}) + + @setup({"cache15": "{% load cache %}{% cache foo bar %}{% endcache %}"}) + def test_cache15(self): + with self.assertRaises(TemplateSyntaxError): + self.engine.render_to_string("cache15", {"foo": []}) + + @setup({"cache16": "{% load cache %}{% cache 1 foo bar %}{% endcache %}"}) + def test_cache16(self): + """ + Regression test for #7460. + """ + output = self.engine.render_to_string( + "cache16", {"foo": "foo", "bar": "with spaces"} + ) + self.assertEqual(output, "") + + @setup( + { + "cache17": ( + "{% load cache %}{% cache 10 long_cache_key poem %}Some Content" + "{% endcache %}" + ) + } + ) + def test_cache17(self): + """ + Regression test for #11270. 
+ """ + output = self.engine.render_to_string( + "cache17", + { + "poem": ( + "Oh freddled gruntbuggly/Thy micturations are to me/" + "As plurdled gabbleblotchits/On a lurgid bee/" + "That mordiously hath bitled out/Its earted jurtles/" + "Into a rancid festering/Or else I shall rend thee in the " + "gobberwarts with my blurglecruncheon/See if I don't." + ), + }, + ) + self.assertEqual(output, "Some Content") + + @setup( + { + "cache18": ( + '{% load cache custom %}{% cache 2|noop:"x y" cache18 %}cache18' + "{% endcache %}" + ) + } + ) + def test_cache18(self): + """ + Test whitespace in filter arguments + """ + output = self.engine.render_to_string("cache18") + self.assertEqual(output, "cache18") + + @setup( + { + "first": "{% load cache %}{% cache None fragment19 %}content{% endcache %}", + "second": ( + "{% load cache %}{% cache None fragment19 %}not rendered{% endcache %}" + ), + } + ) + def test_none_timeout(self): + """A timeout of None means "cache forever".""" + output = self.engine.render_to_string("first") + self.assertEqual(output, "content") + output = self.engine.render_to_string("second") + self.assertEqual(output, "content") + + +class CacheTests(SimpleTestCase): + @classmethod + def setUpClass(cls): + cls.engine = Engine(libraries={"cache": "django.templatetags.cache"}) + super().setUpClass() + + def test_cache_regression_20130(self): + t = self.engine.from_string( + "{% load cache %}{% cache 1 regression_20130 %}foo{% endcache %}" + ) + cachenode = t.nodelist[1] + self.assertEqual(cachenode.fragment_name, "regression_20130") + + @override_settings( + CACHES={ + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "default", + }, + "template_fragments": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "fragments", + }, + } + ) + def test_cache_fragment_cache(self): + """ + When a cache called "template_fragments" is present, the cache tag + will use it in preference to 'default' + """ 
+ t1 = self.engine.from_string( + "{% load cache %}{% cache 1 fragment %}foo{% endcache %}" + ) + t2 = self.engine.from_string( + '{% load cache %}{% cache 1 fragment using="default" %}bar{% endcache %}' + ) + + ctx = Context() + o1 = t1.render(ctx) + o2 = t2.render(ctx) + + self.assertEqual(o1, "foo") + self.assertEqual(o2, "bar") + + def test_cache_missing_backend(self): + """ + When a cache that doesn't exist is specified, the cache tag will + raise a TemplateSyntaxError + '""" + t = self.engine.from_string( + '{% load cache %}{% cache 1 backend using="unknown" %}bar{% endcache %}' + ) + + ctx = Context() + with self.assertRaises(TemplateSyntaxError): + t.render(ctx) diff --git a/testbed/django__django/tests/template_tests/syntax_tests/test_comment.py b/testbed/django__django/tests/template_tests/syntax_tests/test_comment.py new file mode 100644 index 0000000000000000000000000000000000000000..58dbe88b364ce6077cb6d86dddc19a1065b3bff5 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/test_comment.py @@ -0,0 +1,95 @@ +from django.test import SimpleTestCase + +from ..utils import setup + + +class CommentSyntaxTests(SimpleTestCase): + @setup({"comment-syntax01": "{# this is hidden #}hello"}) + def test_comment_syntax01(self): + output = self.engine.render_to_string("comment-syntax01") + self.assertEqual(output, "hello") + + @setup({"comment-syntax02": "{# this is hidden #}hello{# foo #}"}) + def test_comment_syntax02(self): + output = self.engine.render_to_string("comment-syntax02") + self.assertEqual(output, "hello") + + @setup({"comment-syntax03": "foo{# {% if %} #}"}) + def test_comment_syntax03(self): + output = self.engine.render_to_string("comment-syntax03") + self.assertEqual(output, "foo") + + @setup({"comment-syntax04": "foo{# {% endblock %} #}"}) + def test_comment_syntax04(self): + output = self.engine.render_to_string("comment-syntax04") + self.assertEqual(output, "foo") + + @setup({"comment-syntax05": "foo{# {% somerandomtag %} 
#}"}) + def test_comment_syntax05(self): + output = self.engine.render_to_string("comment-syntax05") + self.assertEqual(output, "foo") + + @setup({"comment-syntax06": "foo{# {% #}"}) + def test_comment_syntax06(self): + output = self.engine.render_to_string("comment-syntax06") + self.assertEqual(output, "foo") + + @setup({"comment-syntax07": "foo{# %} #}"}) + def test_comment_syntax07(self): + output = self.engine.render_to_string("comment-syntax07") + self.assertEqual(output, "foo") + + @setup({"comment-syntax08": "foo{# %} #}bar"}) + def test_comment_syntax08(self): + output = self.engine.render_to_string("comment-syntax08") + self.assertEqual(output, "foobar") + + @setup({"comment-syntax09": "foo{# {{ #}"}) + def test_comment_syntax09(self): + output = self.engine.render_to_string("comment-syntax09") + self.assertEqual(output, "foo") + + @setup({"comment-syntax10": "foo{# }} #}"}) + def test_comment_syntax10(self): + output = self.engine.render_to_string("comment-syntax10") + self.assertEqual(output, "foo") + + @setup({"comment-syntax11": "foo{# { #}"}) + def test_comment_syntax11(self): + output = self.engine.render_to_string("comment-syntax11") + self.assertEqual(output, "foo") + + @setup({"comment-syntax12": "foo{# } #}"}) + def test_comment_syntax12(self): + output = self.engine.render_to_string("comment-syntax12") + self.assertEqual(output, "foo") + + @setup({"comment-tag01": "{% comment %}this is hidden{% endcomment %}hello"}) + def test_comment_tag01(self): + output = self.engine.render_to_string("comment-tag01") + self.assertEqual(output, "hello") + + @setup( + { + "comment-tag02": "{% comment %}this is hidden{% endcomment %}" + "hello{% comment %}foo{% endcomment %}" + } + ) + def test_comment_tag02(self): + output = self.engine.render_to_string("comment-tag02") + self.assertEqual(output, "hello") + + @setup({"comment-tag03": "foo{% comment %} {% if %} {% endcomment %}"}) + def test_comment_tag03(self): + output = 
self.engine.render_to_string("comment-tag03") + self.assertEqual(output, "foo") + + @setup({"comment-tag04": "foo{% comment %} {% endblock %} {% endcomment %}"}) + def test_comment_tag04(self): + output = self.engine.render_to_string("comment-tag04") + self.assertEqual(output, "foo") + + @setup({"comment-tag05": "foo{% comment %} {% somerandomtag %} {% endcomment %}"}) + def test_comment_tag05(self): + output = self.engine.render_to_string("comment-tag05") + self.assertEqual(output, "foo") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/test_cycle.py b/testbed/django__django/tests/template_tests/syntax_tests/test_cycle.py new file mode 100644 index 0000000000000000000000000000000000000000..cff0363e38adb28d1a27c54de35771632c897dfd --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/test_cycle.py @@ -0,0 +1,230 @@ +from django.template import TemplateSyntaxError +from django.test import SimpleTestCase + +from ..utils import setup + + +class CycleTagTests(SimpleTestCase): + @setup({"cycle01": "{% cycle a %}"}) + def test_cycle01(self): + msg = "No named cycles in template. 
'a' is not defined" + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.get_template("cycle01") + + @setup({"cycle05": "{% cycle %}"}) + def test_cycle05(self): + msg = "'cycle' tag requires at least two arguments" + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.get_template("cycle05") + + @setup({"cycle07": "{% cycle a,b,c as foo %}{% cycle bar %}"}) + def test_cycle07(self): + msg = "Could not parse the remainder: ',b,c' from 'a,b,c'" + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.get_template("cycle07") + + @setup({"cycle10": "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}"}) + def test_cycle10(self): + output = self.engine.render_to_string("cycle10") + self.assertEqual(output, "ab") + + @setup({"cycle11": "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}"}) + def test_cycle11(self): + output = self.engine.render_to_string("cycle11") + self.assertEqual(output, "abc") + + @setup( + { + "cycle12": ( + "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}" + "{% cycle abc %}" + ) + } + ) + def test_cycle12(self): + output = self.engine.render_to_string("cycle12") + self.assertEqual(output, "abca") + + @setup({"cycle13": "{% for i in test %}{% cycle 'a' 'b' %}{{ i }},{% endfor %}"}) + def test_cycle13(self): + output = self.engine.render_to_string("cycle13", {"test": list(range(5))}) + self.assertEqual(output, "a0,b1,a2,b3,a4,") + + @setup({"cycle14": "{% cycle one two as foo %}{% cycle foo %}"}) + def test_cycle14(self): + output = self.engine.render_to_string("cycle14", {"one": "1", "two": "2"}) + self.assertEqual(output, "12") + + @setup({"cycle15": "{% for i in test %}{% cycle aye bee %}{{ i }},{% endfor %}"}) + def test_cycle15(self): + output = self.engine.render_to_string( + "cycle15", {"test": list(range(5)), "aye": "a", "bee": "b"} + ) + self.assertEqual(output, "a0,b1,a2,b3,a4,") + + @setup({"cycle16": "{% cycle one|lower two as foo %}{% cycle foo %}"}) + def 
test_cycle16(self): + output = self.engine.render_to_string("cycle16", {"one": "A", "two": "2"}) + self.assertEqual(output, "a2") + + @setup( + { + "cycle17": "{% cycle 'a' 'b' 'c' as abc silent %}" + "{% cycle abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}" + } + ) + def test_cycle17(self): + output = self.engine.render_to_string("cycle17") + self.assertEqual(output, "") + + @setup({"cycle18": "{% cycle 'a' 'b' 'c' as foo invalid_flag %}"}) + def test_cycle18(self): + msg = "Only 'silent' flag is allowed after cycle's name, not 'invalid_flag'." + with self.assertRaisesMessage(TemplateSyntaxError, msg): + self.engine.get_template("cycle18") + + @setup({"cycle19": "{% cycle 'a' 'b' as silent %}{% cycle silent %}"}) + def test_cycle19(self): + output = self.engine.render_to_string("cycle19") + self.assertEqual(output, "ab") + + @setup({"cycle20": "{% cycle one two as foo %} & {% cycle foo %}"}) + def test_cycle20(self): + output = self.engine.render_to_string( + "cycle20", {"two": "C & D", "one": "A & B"} + ) + self.assertEqual(output, "A & B & C & D") + + @setup( + { + "cycle21": "{% filter force_escape %}" + "{% cycle one two as foo %} & {% cycle foo %}{% endfilter %}" + } + ) + def test_cycle21(self): + output = self.engine.render_to_string( + "cycle21", {"two": "C & D", "one": "A & B"} + ) + self.assertEqual(output, "A &amp; B & C &amp; D") + + @setup( + { + "cycle22": ( + "{% for x in values %}{% cycle 'a' 'b' 'c' as abc silent %}{{ x }}" + "{% endfor %}" + ) + } + ) + def test_cycle22(self): + output = self.engine.render_to_string("cycle22", {"values": [1, 2, 3, 4]}) + self.assertEqual(output, "1234") + + @setup( + { + "cycle23": "{% for x in values %}" + "{% cycle 'a' 'b' 'c' as abc silent %}{{ abc }}{{ x }}{% endfor %}" + } + ) + def test_cycle23(self): + output = self.engine.render_to_string("cycle23", {"values": [1, 2, 3, 4]}) + self.assertEqual(output, "a1b2c3a4") + + @setup( + { + "cycle24": ( + "{% for x in values %}" + "{% cycle 'a' 'b' 'c' as abc 
silent %}{% include 'included-cycle' %}" + "{% endfor %}" + ), + "included-cycle": "{{ abc }}", + } + ) + def test_cycle24(self): + output = self.engine.render_to_string("cycle24", {"values": [1, 2, 3, 4]}) + self.assertEqual(output, "abca") + + @setup({"cycle25": "{% cycle a as abc %}"}) + def test_cycle25(self): + output = self.engine.render_to_string("cycle25", {"a": "<"}) + self.assertEqual(output, "<") + + @setup({"cycle26": "{% cycle a b as ab %}{% cycle ab %}"}) + def test_cycle26(self): + output = self.engine.render_to_string("cycle26", {"a": "<", "b": ">"}) + self.assertEqual(output, "<>") + + @setup( + { + "cycle27": ( + "{% autoescape off %}{% cycle a b as ab %}{% cycle ab %}" + "{% endautoescape %}" + ) + } + ) + def test_cycle27(self): + output = self.engine.render_to_string("cycle27", {"a": "<", "b": ">"}) + self.assertEqual(output, "<>") + + @setup({"cycle28": "{% cycle a|safe b as ab %}{% cycle ab %}"}) + def test_cycle28(self): + output = self.engine.render_to_string("cycle28", {"a": "<", "b": ">"}) + self.assertEqual(output, "<>") + + @setup( + { + "cycle29": "{% cycle 'a' 'b' 'c' as cycler silent %}" + "{% for x in values %}" + "{% ifchanged x %}" + "{% cycle cycler %}{{ cycler }}" + "{% else %}" + "{{ cycler }}" + "{% endifchanged %}" + "{% endfor %}" + } + ) + def test_cycle29(self): + """ + A named {% cycle %} tag works inside an {% ifchanged %} block and a + {% for %} loop. + """ + output = self.engine.render_to_string( + "cycle29", {"values": [1, 2, 3, 4, 5, 6, 7, 8, 8, 8, 9, 9]} + ) + self.assertEqual(output, "bcabcabcccaa") + + @setup( + { + "cycle30": "{% cycle 'a' 'b' 'c' as cycler silent %}" + "{% for x in values %}" + "{% with doesnothing=irrelevant %}" + "{% ifchanged x %}" + "{% cycle cycler %}{{ cycler }}" + "{% else %}" + "{{ cycler }}" + "{% endifchanged %}" + "{% endwith %}" + "{% endfor %}" + } + ) + def test_cycle30(self): + """ + A {% with %} tag shouldn't reset the {% cycle %} variable. 
+ """ + output = self.engine.render_to_string( + "cycle30", {"irrelevant": 1, "values": [1, 2, 3, 4, 5, 6, 7, 8, 8, 8, 9, 9]} + ) + self.assertEqual(output, "bcabcabcccaa") + + @setup( + { + "undefined_cycle": "{% cycle 'a' 'b' 'c' as cycler silent %}" + "{% for x in values %}" + "{% cycle undefined %}{{ cycler }}" + "{% endfor %}" + } + ) + def test_cycle_undefined(self): + with self.assertRaisesMessage( + TemplateSyntaxError, "Named cycle 'undefined' does not exist" + ): + self.engine.render_to_string("undefined_cycle") diff --git a/testbed/django__django/tests/template_tests/syntax_tests/test_debug.py b/testbed/django__django/tests/template_tests/syntax_tests/test_debug.py new file mode 100644 index 0000000000000000000000000000000000000000..cbd71fa51a42833eeeb80281c3df44ad0ba1cba9 --- /dev/null +++ b/testbed/django__django/tests/template_tests/syntax_tests/test_debug.py @@ -0,0 +1,45 @@ +from django.contrib.auth.models import Group +from django.test import SimpleTestCase, override_settings + +from ..utils import setup + + +@override_settings(DEBUG=True) +class DebugTests(SimpleTestCase): + @override_settings(DEBUG=False) + @setup({"non_debug": "{% debug %}"}) + def test_non_debug(self): + output = self.engine.render_to_string("non_debug", {}) + self.assertEqual(output, "") + + @setup({"modules": "{% debug %}"}) + def test_modules(self): + output = self.engine.render_to_string("modules", {}) + self.assertIn( + "'django': <module 'django' ", + output, + ) + + @setup({"plain": "{% debug %}"}) + def test_plain(self): + output = self.engine.render_to_string("plain", {"a": 1}) + self.assertTrue( + output.startswith( + "{'a': 1}" + "{'False': False, 'None': None, " + "'True': True}\n\n{" + ) + ) + + @setup({"non_ascii": "{% debug %}"}) + def test_non_ascii(self): + group = Group(name="清風") + output = self.engine.render_to_string("non_ascii", {"group": group}) + self.assertTrue(output.startswith("{'group': <Group: 清風>}")) + + @setup({"script": "{% debug %}"}) + def 
test_script(self): + output = self.engine.render_to_string("script", {"frag": " +{% endblock %} diff --git a/testbed/django__django/tests/templates/custom_admin/delete_confirmation.html b/testbed/django__django/tests/templates/custom_admin/delete_confirmation.html new file mode 100644 index 0000000000000000000000000000000000000000..9353c5bfc8e4a68290c9528ca6048ee42e72e8e8 --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/delete_confirmation.html @@ -0,0 +1 @@ +{% extends "admin/delete_confirmation.html" %} diff --git a/testbed/django__django/tests/templates/custom_admin/delete_selected_confirmation.html b/testbed/django__django/tests/templates/custom_admin/delete_selected_confirmation.html new file mode 100644 index 0000000000000000000000000000000000000000..9268536092403f34b97a6ad4598c069c4b621a11 --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/delete_selected_confirmation.html @@ -0,0 +1 @@ +{% extends "admin/delete_selected_confirmation.html" %} diff --git a/testbed/django__django/tests/templates/custom_admin/index.html b/testbed/django__django/tests/templates/custom_admin/index.html new file mode 100644 index 0000000000000000000000000000000000000000..75b6ca3d18bbfc4e6be21814de7e72e3c053be93 --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/index.html @@ -0,0 +1,6 @@ +{% extends "admin/index.html" %} + +{% block content %} +Hello from a custom index template {{ foo }} +{{ block.super }} +{% endblock %} diff --git a/testbed/django__django/tests/templates/custom_admin/login.html b/testbed/django__django/tests/templates/custom_admin/login.html new file mode 100644 index 0000000000000000000000000000000000000000..e10a26952fa66803937237dc4676cee207d76ef0 --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/login.html @@ -0,0 +1,6 @@ +{% extends "admin/login.html" %} + +{% block content %} +Hello from a custom login template +{{ block.super }} +{% endblock %} diff --git 
a/testbed/django__django/tests/templates/custom_admin/logout.html b/testbed/django__django/tests/templates/custom_admin/logout.html new file mode 100644 index 0000000000000000000000000000000000000000..3a9301b6c641a10ffa7f747723179cb77c5af19b --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/logout.html @@ -0,0 +1,6 @@ +{% extends "registration/logged_out.html" %} + +{% block content %} +Hello from a custom logout template +{{ block.super }} +{% endblock %} diff --git a/testbed/django__django/tests/templates/custom_admin/object_history.html b/testbed/django__django/tests/templates/custom_admin/object_history.html new file mode 100644 index 0000000000000000000000000000000000000000..aee3b5bcba7a8282889b4ec9323b3f28fde04bac --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/object_history.html @@ -0,0 +1 @@ +{% extends "admin/object_history.html" %} diff --git a/testbed/django__django/tests/templates/custom_admin/password_change_done.html b/testbed/django__django/tests/templates/custom_admin/password_change_done.html new file mode 100644 index 0000000000000000000000000000000000000000..0e4a7f25ec7b7e2d9a0ee71da832dadb050deb4a --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/password_change_done.html @@ -0,0 +1,6 @@ +{% extends "registration/password_change_done.html" %} + +{% block content %} +Hello from a custom password change done template +{{ block.super }} +{% endblock %} diff --git a/testbed/django__django/tests/templates/custom_admin/password_change_form.html b/testbed/django__django/tests/templates/custom_admin/password_change_form.html new file mode 100644 index 0000000000000000000000000000000000000000..12d911002e6e4fdf295c242662870f50befe5e72 --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/password_change_form.html @@ -0,0 +1,7 @@ +{% extends "registration/password_change_form.html" %} + +{% block content %} +{{ spam }} +Hello from a custom password change form template 
+{{ block.super }} +{% endblock %} diff --git a/testbed/django__django/tests/templates/custom_admin/popup_response.html b/testbed/django__django/tests/templates/custom_admin/popup_response.html new file mode 100644 index 0000000000000000000000000000000000000000..fd21d13d148d84370879f2cb4c9b46ccf0df5ae3 --- /dev/null +++ b/testbed/django__django/tests/templates/custom_admin/popup_response.html @@ -0,0 +1 @@ +{% extends "admin/popup_response.html" %} diff --git a/testbed/django__django/tests/templates/extended.html b/testbed/django__django/tests/templates/extended.html new file mode 100644 index 0000000000000000000000000000000000000000..fb2b6d6e4923a07dd89d9e8416201b3fe8677539 --- /dev/null +++ b/testbed/django__django/tests/templates/extended.html @@ -0,0 +1,5 @@ +{% extends "base.html" %} +{% block title %}Extended template{% endblock %} +{% block content %} +This is just a template extending the base. +{% endblock %} diff --git a/testbed/django__django/tests/templates/form_view.html b/testbed/django__django/tests/templates/form_view.html new file mode 100644 index 0000000000000000000000000000000000000000..16945a018cf6ab15a140c5f7e6c917f51dd988e3 --- /dev/null +++ b/testbed/django__django/tests/templates/form_view.html @@ -0,0 +1,15 @@ +{% extends "base.html" %} +{% block title %}Submit data{% endblock %} +{% block content %} +

    {{ message }}

    +
    +{% if form.errors %} +

    Please correct the errors below:

    +{% endif %} +
      +{{ form }} +
    • +
    +
    + +{% endblock %} diff --git a/testbed/django__django/tests/templates/login.html b/testbed/django__django/tests/templates/login.html new file mode 100644 index 0000000000000000000000000000000000000000..ddc3224009e6339f1dccc7aeaaa6e89bc2e75f04 --- /dev/null +++ b/testbed/django__django/tests/templates/login.html @@ -0,0 +1,17 @@ +{% extends "base.html" %} +{% block title %}Login{% endblock %} +{% block content %} +{% if form.errors %} +

    Your username and password didn't match. Please try again.

    +{% endif %} + +
    + + + +
    {{ form.username }}
    {{ form.password }}
    + + + +
    +{% endblock %} diff --git a/testbed/django__django/tests/templates/views/article_archive_day.html b/testbed/django__django/tests/templates/views/article_archive_day.html new file mode 100644 index 0000000000000000000000000000000000000000..bd2d67f6f36f4e266933f307edb2a8e1ed1f65d1 --- /dev/null +++ b/testbed/django__django/tests/templates/views/article_archive_day.html @@ -0,0 +1 @@ +This template intentionally left blank diff --git a/testbed/django__django/tests/templates/views/article_archive_month.html b/testbed/django__django/tests/templates/views/article_archive_month.html new file mode 100644 index 0000000000000000000000000000000000000000..bd2d67f6f36f4e266933f307edb2a8e1ed1f65d1 --- /dev/null +++ b/testbed/django__django/tests/templates/views/article_archive_month.html @@ -0,0 +1 @@ +This template intentionally left blank diff --git a/testbed/django__django/tests/templates/views/article_confirm_delete.html b/testbed/django__django/tests/templates/views/article_confirm_delete.html new file mode 100644 index 0000000000000000000000000000000000000000..bd2d67f6f36f4e266933f307edb2a8e1ed1f65d1 --- /dev/null +++ b/testbed/django__django/tests/templates/views/article_confirm_delete.html @@ -0,0 +1 @@ +This template intentionally left blank diff --git a/testbed/django__django/tests/templates/views/article_detail.html b/testbed/django__django/tests/templates/views/article_detail.html new file mode 100644 index 0000000000000000000000000000000000000000..952299db918d73ad2be29f8af7d2d09f7ebb5df1 --- /dev/null +++ b/testbed/django__django/tests/templates/views/article_detail.html @@ -0,0 +1 @@ +Article detail template. 
diff --git a/testbed/django__django/tests/templates/views/article_form.html b/testbed/django__django/tests/templates/views/article_form.html new file mode 100644 index 0000000000000000000000000000000000000000..e2aa1f9535ab58dd7129bd34616096ab97dc8359 --- /dev/null +++ b/testbed/django__django/tests/templates/views/article_form.html @@ -0,0 +1,3 @@ +Article form template. + +{{ form.errors }} diff --git a/testbed/django__django/tests/templates/views/article_list.html b/testbed/django__django/tests/templates/views/article_list.html new file mode 100644 index 0000000000000000000000000000000000000000..1d9335ea3ec0b4d97f9d610d3d1b097579f88e89 --- /dev/null +++ b/testbed/django__django/tests/templates/views/article_list.html @@ -0,0 +1 @@ +{{ object_list }} diff --git a/testbed/django__django/tests/templates/views/datearticle_archive_month.html b/testbed/django__django/tests/templates/views/datearticle_archive_month.html new file mode 100644 index 0000000000000000000000000000000000000000..bd2d67f6f36f4e266933f307edb2a8e1ed1f65d1 --- /dev/null +++ b/testbed/django__django/tests/templates/views/datearticle_archive_month.html @@ -0,0 +1 @@ +This template intentionally left blank diff --git a/testbed/django__django/tests/templates/views/urlarticle_detail.html b/testbed/django__django/tests/templates/views/urlarticle_detail.html new file mode 100644 index 0000000000000000000000000000000000000000..924f310300129179f463b3cda58d4a47d65c9746 --- /dev/null +++ b/testbed/django__django/tests/templates/views/urlarticle_detail.html @@ -0,0 +1 @@ +UrlArticle detail template. diff --git a/testbed/django__django/tests/templates/views/urlarticle_form.html b/testbed/django__django/tests/templates/views/urlarticle_form.html new file mode 100644 index 0000000000000000000000000000000000000000..578dd98ca6ab2bf3693ee90f86958c7b9499b870 --- /dev/null +++ b/testbed/django__django/tests/templates/views/urlarticle_form.html @@ -0,0 +1,3 @@ +UrlArticle form template. 
+ +{{ form.errors }} diff --git a/testbed/django__django/tests/test_client/__init__.py b/testbed/django__django/tests/test_client/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_client/auth_backends.py b/testbed/django__django/tests/test_client/auth_backends.py new file mode 100644 index 0000000000000000000000000000000000000000..97a2763aaab3784064df9d4550c3e77ed9b918c1 --- /dev/null +++ b/testbed/django__django/tests/test_client/auth_backends.py @@ -0,0 +1,9 @@ +from django.contrib.auth.backends import ModelBackend + + +class TestClientBackend(ModelBackend): + pass + + +class BackendWithoutGetUserMethod: + pass diff --git a/testbed/django__django/tests/test_client/test_conditional_content_removal.py b/testbed/django__django/tests/test_client/test_conditional_content_removal.py new file mode 100644 index 0000000000000000000000000000000000000000..d8938062febdcd1294b62a4e53ba2e3fb531f631 --- /dev/null +++ b/testbed/django__django/tests/test_client/test_conditional_content_removal.py @@ -0,0 +1,56 @@ +import gzip + +from django.http import HttpRequest, HttpResponse, StreamingHttpResponse +from django.test import SimpleTestCase +from django.test.client import conditional_content_removal + + +class ConditionalContentTests(SimpleTestCase): + def test_conditional_content_removal(self): + """ + Content is removed from regular and streaming responses with a + status_code of 100-199, 204, 304, or a method of "HEAD". + """ + req = HttpRequest() + + # Do nothing for 200 responses. + res = HttpResponse("abc") + conditional_content_removal(req, res) + self.assertEqual(res.content, b"abc") + + res = StreamingHttpResponse(["abc"]) + conditional_content_removal(req, res) + self.assertEqual(b"".join(res), b"abc") + + # Strip content for some status codes. 
+ for status_code in (100, 150, 199, 204, 304): + res = HttpResponse("abc", status=status_code) + conditional_content_removal(req, res) + self.assertEqual(res.content, b"") + + res = StreamingHttpResponse(["abc"], status=status_code) + conditional_content_removal(req, res) + self.assertEqual(b"".join(res), b"") + + # Issue #20472 + abc = gzip.compress(b"abc") + res = HttpResponse(abc, status=304) + res["Content-Encoding"] = "gzip" + conditional_content_removal(req, res) + self.assertEqual(res.content, b"") + + res = StreamingHttpResponse([abc], status=304) + res["Content-Encoding"] = "gzip" + conditional_content_removal(req, res) + self.assertEqual(b"".join(res), b"") + + # Strip content for HEAD requests. + req.method = "HEAD" + + res = HttpResponse("abc") + conditional_content_removal(req, res) + self.assertEqual(res.content, b"") + + res = StreamingHttpResponse(["abc"]) + conditional_content_removal(req, res) + self.assertEqual(b"".join(res), b"") diff --git a/testbed/django__django/tests/test_client/test_fakepayload.py b/testbed/django__django/tests/test_client/test_fakepayload.py new file mode 100644 index 0000000000000000000000000000000000000000..222bef3b00c75caa4dfa4a6cdcf98047afcaf8d9 --- /dev/null +++ b/testbed/django__django/tests/test_client/test_fakepayload.py @@ -0,0 +1,13 @@ +from django.test import SimpleTestCase +from django.test.client import FakePayload + + +class FakePayloadTests(SimpleTestCase): + def test_write_after_read(self): + payload = FakePayload() + for operation in [payload.read, payload.readline]: + with self.subTest(operation=operation.__name__): + operation() + msg = "Unable to write a payload after it's been read" + with self.assertRaisesMessage(ValueError, msg): + payload.write(b"abc") diff --git a/testbed/django__django/tests/test_client/tests.py b/testbed/django__django/tests/test_client/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..a5e980f3d05b5051ea7e5d9375ba0f20086e9f8d --- /dev/null +++ 
b/testbed/django__django/tests/test_client/tests.py @@ -0,0 +1,1248 @@ +""" +Testing using the Test Client + +The test client is a class that can act like a simple +browser for testing purposes. + +It allows the user to compose GET and POST requests, and +obtain the response that the server gave to those requests. +The server Response objects are annotated with the details +of the contexts and templates that were rendered during the +process of serving the request. + +``Client`` objects are stateful - they will retain cookie (and +thus session) details for the lifetime of the ``Client`` instance. + +This is not intended as a replacement for Twill, Selenium, or +other browser automation frameworks - it is here to allow +testing against the contexts and templates produced by a view, +rather than the HTML rendered to the end-user. + +""" +import copy +import itertools +import tempfile +from unittest import mock + +from django.contrib.auth.models import User +from django.core import mail +from django.http import HttpResponse, HttpResponseNotAllowed +from django.test import ( + AsyncRequestFactory, + Client, + RequestFactory, + SimpleTestCase, + TestCase, + modify_settings, + override_settings, +) +from django.urls import reverse_lazy +from django.utils.decorators import async_only_middleware +from django.views.generic import RedirectView + +from .views import TwoArgException, get_view, post_view, trace_view + + +def middleware_urlconf(get_response): + def middleware(request): + request.urlconf = "test_client.urls_middleware_urlconf" + return get_response(request) + + return middleware + + +@async_only_middleware +def async_middleware_urlconf(get_response): + async def middleware(request): + request.urlconf = "test_client.urls_middleware_urlconf" + return await get_response(request) + + return middleware + + +@override_settings(ROOT_URLCONF="test_client.urls") +class ClientTest(TestCase): + @classmethod + def setUpTestData(cls): + cls.u1 = 
User.objects.create_user(username="testclient", password="password") + cls.u2 = User.objects.create_user( + username="inactive", password="password", is_active=False + ) + + def test_get_view(self): + "GET a view" + # The data is ignored, but let's check it doesn't crash the system + # anyway. + data = {"var": "\xf2"} + response = self.client.get("/get_view/", data) + + # Check some response details + self.assertContains(response, "This is a test") + self.assertEqual(response.context["var"], "\xf2") + self.assertEqual(response.templates[0].name, "GET Template") + + def test_copy_response(self): + tests = ["/cbv_view/", "/get_view/"] + for url in tests: + with self.subTest(url=url): + response = self.client.get(url) + response_copy = copy.copy(response) + self.assertEqual(repr(response), repr(response_copy)) + self.assertIs(response_copy.client, response.client) + self.assertIs(response_copy.resolver_match, response.resolver_match) + self.assertIs(response_copy.wsgi_request, response.wsgi_request) + + async def test_copy_response_async(self): + response = await self.async_client.get("/async_get_view/") + response_copy = copy.copy(response) + self.assertEqual(repr(response), repr(response_copy)) + self.assertIs(response_copy.client, response.client) + self.assertIs(response_copy.resolver_match, response.resolver_match) + self.assertIs(response_copy.asgi_request, response.asgi_request) + + def test_query_string_encoding(self): + # WSGI requires latin-1 encoded strings. + response = self.client.get("/get_view/?var=1\ufffd") + self.assertEqual(response.context["var"], "1\ufffd") + + def test_get_data_none(self): + msg = ( + "Cannot encode None for key 'value' in a query string. Did you " + "mean to pass an empty string or omit the value?" 
+ ) + with self.assertRaisesMessage(TypeError, msg): + self.client.get("/get_view/", {"value": None}) + + def test_get_post_view(self): + "GET a view that normally expects POSTs" + response = self.client.get("/post_view/", {}) + + # Check some response details + self.assertEqual(response.status_code, 200) + self.assertEqual(response.templates[0].name, "Empty GET Template") + self.assertTemplateUsed(response, "Empty GET Template") + self.assertTemplateNotUsed(response, "Empty POST Template") + + def test_empty_post(self): + "POST an empty dictionary to a view" + response = self.client.post("/post_view/", {}) + + # Check some response details + self.assertEqual(response.status_code, 200) + self.assertEqual(response.templates[0].name, "Empty POST Template") + self.assertTemplateNotUsed(response, "Empty GET Template") + self.assertTemplateUsed(response, "Empty POST Template") + + def test_post(self): + "POST some data to a view" + post_data = {"value": 37} + response = self.client.post("/post_view/", post_data) + + # Check some response details + self.assertContains(response, "Data received") + self.assertEqual(response.context["data"], "37") + self.assertEqual(response.templates[0].name, "POST Template") + + def test_post_data_none(self): + msg = ( + "Cannot encode None for key 'value' as POST data. Did you mean " + "to pass an empty string or omit the value?" 
+ ) + with self.assertRaisesMessage(TypeError, msg): + self.client.post("/post_view/", {"value": None}) + + def test_json_serialization(self): + """The test client serializes JSON data.""" + methods = ("post", "put", "patch", "delete") + tests = ( + ({"value": 37}, {"value": 37}), + ([37, True], [37, True]), + ((37, False), [37, False]), + ) + for method in methods: + with self.subTest(method=method): + for data, expected in tests: + with self.subTest(data): + client_method = getattr(self.client, method) + method_name = method.upper() + response = client_method( + "/json_view/", data, content_type="application/json" + ) + self.assertContains(response, "Viewing %s page." % method_name) + self.assertEqual(response.context["data"], expected) + + def test_json_encoder_argument(self): + """The test Client accepts a json_encoder.""" + mock_encoder = mock.MagicMock() + mock_encoding = mock.MagicMock() + mock_encoder.return_value = mock_encoding + mock_encoding.encode.return_value = '{"value": 37}' + + client = self.client_class(json_encoder=mock_encoder) + # Vendored tree JSON content types are accepted. 
+ client.post( + "/json_view/", {"value": 37}, content_type="application/vnd.api+json" + ) + self.assertTrue(mock_encoder.called) + self.assertTrue(mock_encoding.encode.called) + + def test_put(self): + response = self.client.put("/put_view/", {"foo": "bar"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.templates[0].name, "PUT Template") + self.assertEqual(response.context["data"], "{'foo': 'bar'}") + self.assertEqual(response.context["Content-Length"], "14") + + def test_trace(self): + """TRACE a view""" + response = self.client.trace("/trace_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["method"], "TRACE") + self.assertEqual(response.templates[0].name, "TRACE Template") + + def test_response_headers(self): + "Check the value of HTTP headers returned in a response" + response = self.client.get("/header_view/") + + self.assertEqual(response.headers["X-DJANGO-TEST"], "Slartibartfast") + + def test_response_attached_request(self): + """ + The returned response has a ``request`` attribute with the originating + environ dict and a ``wsgi_request`` with the originating WSGIRequest. + """ + response = self.client.get("/header_view/") + + self.assertTrue(hasattr(response, "request")) + self.assertTrue(hasattr(response, "wsgi_request")) + for key, value in response.request.items(): + self.assertIn(key, response.wsgi_request.environ) + self.assertEqual(response.wsgi_request.environ[key], value) + + def test_response_resolver_match(self): + """ + The response contains a ResolverMatch instance. + """ + response = self.client.get("/header_view/") + self.assertTrue(hasattr(response, "resolver_match")) + + def test_response_resolver_match_redirect_follow(self): + """ + The response ResolverMatch instance contains the correct + information when following redirects. 
+ """ + response = self.client.get("/redirect_view/", follow=True) + self.assertEqual(response.resolver_match.url_name, "get_view") + + def test_response_resolver_match_regular_view(self): + """ + The response ResolverMatch instance contains the correct + information when accessing a regular view. + """ + response = self.client.get("/get_view/") + self.assertEqual(response.resolver_match.url_name, "get_view") + + def test_response_resolver_match_class_based_view(self): + """ + The response ResolverMatch instance can be used to access the CBV view + class. + """ + response = self.client.get("/accounts/") + self.assertIs(response.resolver_match.func.view_class, RedirectView) + + @modify_settings(MIDDLEWARE={"prepend": "test_client.tests.middleware_urlconf"}) + def test_response_resolver_match_middleware_urlconf(self): + response = self.client.get("/middleware_urlconf_view/") + self.assertEqual(response.resolver_match.url_name, "middleware_urlconf_view") + + def test_raw_post(self): + "POST raw data (with a content type) to a view" + test_doc = """ + BlinkMalcolm Gladwell + + """ + response = self.client.post( + "/raw_post_view/", test_doc, content_type="text/xml" + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.templates[0].name, "Book template") + self.assertEqual(response.content, b"Blink - Malcolm Gladwell") + + def test_insecure(self): + "GET a URL through http" + response = self.client.get("/secure_view/", secure=False) + self.assertFalse(response.test_was_secure_request) + self.assertEqual(response.test_server_port, "80") + + def test_secure(self): + "GET a URL through https" + response = self.client.get("/secure_view/", secure=True) + self.assertTrue(response.test_was_secure_request) + self.assertEqual(response.test_server_port, "443") + + def test_redirect(self): + "GET a URL that redirects elsewhere" + response = self.client.get("/redirect_view/") + self.assertRedirects(response, "/get_view/") + + def 
test_redirect_with_query(self): + "GET a URL that redirects with given GET parameters" + response = self.client.get("/redirect_view/", {"var": "value"}) + self.assertRedirects(response, "/get_view/?var=value") + + def test_redirect_with_query_ordering(self): + """assertRedirects() ignores the order of query string parameters.""" + response = self.client.get("/redirect_view/", {"var": "value", "foo": "bar"}) + self.assertRedirects(response, "/get_view/?var=value&foo=bar") + self.assertRedirects(response, "/get_view/?foo=bar&var=value") + + def test_permanent_redirect(self): + "GET a URL that redirects permanently elsewhere" + response = self.client.get("/permanent_redirect_view/") + self.assertRedirects(response, "/get_view/", status_code=301) + + def test_temporary_redirect(self): + "GET a URL that does a non-permanent redirect" + response = self.client.get("/temporary_redirect_view/") + self.assertRedirects(response, "/get_view/", status_code=302) + + def test_redirect_to_strange_location(self): + "GET a URL that redirects to a non-200 page" + response = self.client.get("/double_redirect_view/") + # The response was a 302, and that the attempt to get the redirection + # location returned 301 when retrieved + self.assertRedirects( + response, "/permanent_redirect_view/", target_status_code=301 + ) + + def test_follow_redirect(self): + "A URL that redirects can be followed to termination." + response = self.client.get("/double_redirect_view/", follow=True) + self.assertRedirects( + response, "/get_view/", status_code=302, target_status_code=200 + ) + self.assertEqual(len(response.redirect_chain), 2) + + def test_follow_relative_redirect(self): + "A URL with a relative redirect can be followed." 
+ response = self.client.get("/accounts/", follow=True) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.request["PATH_INFO"], "/accounts/login/") + + def test_follow_relative_redirect_no_trailing_slash(self): + "A URL with a relative redirect with no trailing slash can be followed." + response = self.client.get("/accounts/no_trailing_slash", follow=True) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.request["PATH_INFO"], "/accounts/login/") + + def test_redirect_to_querystring_only(self): + """A URL that consists of a querystring only can be followed""" + response = self.client.post("/post_then_get_view/", follow=True) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.request["PATH_INFO"], "/post_then_get_view/") + self.assertEqual(response.content, b"The value of success is true.") + + def test_follow_307_and_308_redirect(self): + """ + A 307 or 308 redirect preserves the request method after the redirect. + """ + methods = ("get", "post", "head", "options", "put", "patch", "delete", "trace") + codes = (307, 308) + for method, code in itertools.product(methods, codes): + with self.subTest(method=method, code=code): + req_method = getattr(self.client, method) + response = req_method( + "/redirect_view_%s/" % code, data={"value": "test"}, follow=True + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.request["PATH_INFO"], "/post_view/") + self.assertEqual(response.request["REQUEST_METHOD"], method.upper()) + + def test_follow_307_and_308_preserves_query_string(self): + methods = ("post", "options", "put", "patch", "delete", "trace") + codes = (307, 308) + for method, code in itertools.product(methods, codes): + with self.subTest(method=method, code=code): + req_method = getattr(self.client, method) + response = req_method( + "/redirect_view_%s_query_string/" % code, + data={"value": "test"}, + follow=True, + ) + self.assertRedirects( + response, 
"/post_view/?hello=world", status_code=code + ) + self.assertEqual(response.request["QUERY_STRING"], "hello=world") + + def test_follow_307_and_308_get_head_query_string(self): + methods = ("get", "head") + codes = (307, 308) + for method, code in itertools.product(methods, codes): + with self.subTest(method=method, code=code): + req_method = getattr(self.client, method) + response = req_method( + "/redirect_view_%s_query_string/" % code, + data={"value": "test"}, + follow=True, + ) + self.assertRedirects( + response, "/post_view/?hello=world", status_code=code + ) + self.assertEqual(response.request["QUERY_STRING"], "value=test") + + def test_follow_307_and_308_preserves_post_data(self): + for code in (307, 308): + with self.subTest(code=code): + response = self.client.post( + "/redirect_view_%s/" % code, data={"value": "test"}, follow=True + ) + self.assertContains(response, "test is the value") + + def test_follow_307_and_308_preserves_put_body(self): + for code in (307, 308): + with self.subTest(code=code): + response = self.client.put( + "/redirect_view_%s/?to=/put_view/" % code, data="a=b", follow=True + ) + self.assertContains(response, "a=b is the body") + + def test_follow_307_and_308_preserves_get_params(self): + data = {"var": 30, "to": "/get_view/"} + for code in (307, 308): + with self.subTest(code=code): + response = self.client.get( + "/redirect_view_%s/" % code, data=data, follow=True + ) + self.assertContains(response, "30 is the value") + + def test_redirect_http(self): + """GET a URL that redirects to an HTTP URI.""" + response = self.client.get("/http_redirect_view/", follow=True) + self.assertFalse(response.test_was_secure_request) + + def test_redirect_https(self): + """GET a URL that redirects to an HTTPS URI.""" + response = self.client.get("/https_redirect_view/", follow=True) + self.assertTrue(response.test_was_secure_request) + + def test_notfound_response(self): + "GET a URL that responds as '404:Not Found'" + response = 
self.client.get("/bad_view/") + self.assertContains(response, "MAGIC", status_code=404) + + def test_valid_form(self): + "POST valid data to a form" + post_data = { + "text": "Hello World", + "email": "foo@example.com", + "value": 37, + "single": "b", + "multi": ("b", "c", "e"), + } + response = self.client.post("/form_view/", post_data) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, "Valid POST Template") + + def test_valid_form_with_hints(self): + "GET a form, providing hints in the GET data" + hints = {"text": "Hello World", "multi": ("b", "c", "e")} + response = self.client.get("/form_view/", data=hints) + # The multi-value data has been rolled out ok + self.assertContains(response, "Select a valid choice.", 0) + self.assertTemplateUsed(response, "Form GET Template") + + def test_incomplete_data_form(self): + "POST incomplete data to a form" + post_data = {"text": "Hello World", "value": 37} + response = self.client.post("/form_view/", post_data) + self.assertContains(response, "This field is required.", 3) + self.assertTemplateUsed(response, "Invalid POST Template") + form = response.context["form"] + self.assertFormError(form, "email", "This field is required.") + self.assertFormError(form, "single", "This field is required.") + self.assertFormError(form, "multi", "This field is required.") + + def test_form_error(self): + "POST erroneous data to a form" + post_data = { + "text": "Hello World", + "email": "not an email address", + "value": 37, + "single": "b", + "multi": ("b", "c", "e"), + } + response = self.client.post("/form_view/", post_data) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, "Invalid POST Template") + + self.assertFormError( + response.context["form"], "email", "Enter a valid email address." 
+ ) + + def test_valid_form_with_template(self): + "POST valid data to a form using multiple templates" + post_data = { + "text": "Hello World", + "email": "foo@example.com", + "value": 37, + "single": "b", + "multi": ("b", "c", "e"), + } + response = self.client.post("/form_view_with_template/", post_data) + self.assertContains(response, "POST data OK") + self.assertTemplateUsed(response, "form_view.html") + self.assertTemplateUsed(response, "base.html") + self.assertTemplateNotUsed(response, "Valid POST Template") + + def test_incomplete_data_form_with_template(self): + "POST incomplete data to a form using multiple templates" + post_data = {"text": "Hello World", "value": 37} + response = self.client.post("/form_view_with_template/", post_data) + self.assertContains(response, "POST data has errors") + self.assertTemplateUsed(response, "form_view.html") + self.assertTemplateUsed(response, "base.html") + self.assertTemplateNotUsed(response, "Invalid POST Template") + form = response.context["form"] + self.assertFormError(form, "email", "This field is required.") + self.assertFormError(form, "single", "This field is required.") + self.assertFormError(form, "multi", "This field is required.") + + def test_form_error_with_template(self): + "POST erroneous data to a form using multiple templates" + post_data = { + "text": "Hello World", + "email": "not an email address", + "value": 37, + "single": "b", + "multi": ("b", "c", "e"), + } + response = self.client.post("/form_view_with_template/", post_data) + self.assertContains(response, "POST data has errors") + self.assertTemplateUsed(response, "form_view.html") + self.assertTemplateUsed(response, "base.html") + self.assertTemplateNotUsed(response, "Invalid POST Template") + + self.assertFormError( + response.context["form"], "email", "Enter a valid email address." 
+ ) + + def test_unknown_page(self): + "GET an invalid URL" + response = self.client.get("/unknown_view/") + + # The response was a 404 + self.assertEqual(response.status_code, 404) + + def test_url_parameters(self): + "Make sure that URL ;-parameters are not stripped." + response = self.client.get("/unknown_view/;some-parameter") + + # The path in the response includes it (ignore that it's a 404) + self.assertEqual(response.request["PATH_INFO"], "/unknown_view/;some-parameter") + + def test_view_with_login(self): + "Request a page that is protected with @login_required" + + # Get the page without logging in. Should result in 302. + response = self.client.get("/login_protected_view/") + self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/") + + # Log in + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + @override_settings( + INSTALLED_APPS=["django.contrib.auth"], + SESSION_ENGINE="django.contrib.sessions.backends.file", + ) + def test_view_with_login_when_sessions_app_is_not_installed(self): + self.test_view_with_login() + + def test_view_with_force_login(self): + "Request a page that is protected with @login_required" + # Get the page without logging in. Should result in 302. 
+ response = self.client.get("/login_protected_view/") + self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/") + + # Log in + self.client.force_login(self.u1) + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + def test_view_with_method_login(self): + "Request a page that is protected with a @login_required method" + + # Get the page without logging in. Should result in 302. + response = self.client.get("/login_protected_method_view/") + self.assertRedirects( + response, "/accounts/login/?next=/login_protected_method_view/" + ) + + # Log in + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Request a page that requires a login + response = self.client.get("/login_protected_method_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + def test_view_with_method_force_login(self): + "Request a page that is protected with a @login_required method" + # Get the page without logging in. Should result in 302. + response = self.client.get("/login_protected_method_view/") + self.assertRedirects( + response, "/accounts/login/?next=/login_protected_method_view/" + ) + + # Log in + self.client.force_login(self.u1) + + # Request a page that requires a login + response = self.client.get("/login_protected_method_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + def test_view_with_login_and_custom_redirect(self): + """ + Request a page that is protected with + @login_required(redirect_field_name='redirect_to') + """ + + # Get the page without logging in. Should result in 302. 
+ response = self.client.get("/login_protected_view_custom_redirect/") + self.assertRedirects( + response, + "/accounts/login/?redirect_to=/login_protected_view_custom_redirect/", + ) + + # Log in + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Request a page that requires a login + response = self.client.get("/login_protected_view_custom_redirect/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + def test_view_with_force_login_and_custom_redirect(self): + """ + Request a page that is protected with + @login_required(redirect_field_name='redirect_to') + """ + # Get the page without logging in. Should result in 302. + response = self.client.get("/login_protected_view_custom_redirect/") + self.assertRedirects( + response, + "/accounts/login/?redirect_to=/login_protected_view_custom_redirect/", + ) + + # Log in + self.client.force_login(self.u1) + + # Request a page that requires a login + response = self.client.get("/login_protected_view_custom_redirect/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + def test_view_with_bad_login(self): + "Request a page that is protected with @login, but use bad credentials" + + login = self.client.login(username="otheruser", password="nopassword") + self.assertFalse(login) + + def test_view_with_inactive_login(self): + """ + An inactive user may login if the authenticate backend allows it. 
+ """ + credentials = {"username": "inactive", "password": "password"} + self.assertFalse(self.client.login(**credentials)) + + with self.settings( + AUTHENTICATION_BACKENDS=[ + "django.contrib.auth.backends.AllowAllUsersModelBackend" + ] + ): + self.assertTrue(self.client.login(**credentials)) + + @override_settings( + AUTHENTICATION_BACKENDS=[ + "django.contrib.auth.backends.ModelBackend", + "django.contrib.auth.backends.AllowAllUsersModelBackend", + ] + ) + def test_view_with_inactive_force_login(self): + "Request a page that is protected with @login, but use an inactive login" + + # Get the page without logging in. Should result in 302. + response = self.client.get("/login_protected_view/") + self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/") + + # Log in + self.client.force_login( + self.u2, backend="django.contrib.auth.backends.AllowAllUsersModelBackend" + ) + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "inactive") + + def test_logout(self): + "Request a logout after logging in" + # Log in + self.client.login(username="testclient", password="password") + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + # Log out + self.client.logout() + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/") + + def test_logout_with_force_login(self): + "Request a logout after logging in" + # Log in + self.client.force_login(self.u1) + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + 
self.assertEqual(response.context["user"].username, "testclient") + + # Log out + self.client.logout() + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/") + + @override_settings( + AUTHENTICATION_BACKENDS=[ + "django.contrib.auth.backends.ModelBackend", + "test_client.auth_backends.TestClientBackend", + ], + ) + def test_force_login_with_backend(self): + """ + Request a page that is protected with @login_required when using + force_login() and passing a backend. + """ + # Get the page without logging in. Should result in 302. + response = self.client.get("/login_protected_view/") + self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/") + + # Log in + self.client.force_login( + self.u1, backend="test_client.auth_backends.TestClientBackend" + ) + self.assertEqual(self.u1.backend, "test_client.auth_backends.TestClientBackend") + + # Request a page that requires a login + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + @override_settings( + AUTHENTICATION_BACKENDS=[ + "django.contrib.auth.backends.ModelBackend", + "test_client.auth_backends.TestClientBackend", + ], + ) + def test_force_login_without_backend(self): + """ + force_login() without passing a backend and with multiple backends + configured should automatically use the first backend. 
+ """ + self.client.force_login(self.u1) + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + self.assertEqual(self.u1.backend, "django.contrib.auth.backends.ModelBackend") + + @override_settings( + AUTHENTICATION_BACKENDS=[ + "test_client.auth_backends.BackendWithoutGetUserMethod", + "django.contrib.auth.backends.ModelBackend", + ] + ) + def test_force_login_with_backend_missing_get_user(self): + """ + force_login() skips auth backends without a get_user() method. + """ + self.client.force_login(self.u1) + self.assertEqual(self.u1.backend, "django.contrib.auth.backends.ModelBackend") + + @override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies") + def test_logout_cookie_sessions(self): + self.test_logout() + + def test_view_with_permissions(self): + "Request a page that is protected with @permission_required" + + # Get the page without logging in. Should result in 302. + response = self.client.get("/permission_protected_view/") + self.assertRedirects( + response, "/accounts/login/?next=/permission_protected_view/" + ) + + # Log in + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Log in with wrong permissions. Should result in 302. + response = self.client.get("/permission_protected_view/") + self.assertRedirects( + response, "/accounts/login/?next=/permission_protected_view/" + ) + + # TODO: Log in with right permissions and request the page again + + def test_view_with_permissions_exception(self): + """ + Request a page that is protected with @permission_required but raises + an exception. + """ + + # Get the page without logging in. Should result in 403. 
+ response = self.client.get("/permission_protected_view_exception/") + self.assertEqual(response.status_code, 403) + + # Log in + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Log in with wrong permissions. Should result in 403. + response = self.client.get("/permission_protected_view_exception/") + self.assertEqual(response.status_code, 403) + + def test_view_with_method_permissions(self): + "Request a page that is protected with a @permission_required method" + + # Get the page without logging in. Should result in 302. + response = self.client.get("/permission_protected_method_view/") + self.assertRedirects( + response, "/accounts/login/?next=/permission_protected_method_view/" + ) + + # Log in + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Log in with wrong permissions. Should result in 302. + response = self.client.get("/permission_protected_method_view/") + self.assertRedirects( + response, "/accounts/login/?next=/permission_protected_method_view/" + ) + + # TODO: Log in with right permissions and request the page again + + def test_external_redirect(self): + response = self.client.get("/django_project_redirect/") + self.assertRedirects( + response, "https://www.djangoproject.com/", fetch_redirect_response=False + ) + + def test_external_redirect_without_trailing_slash(self): + """ + Client._handle_redirects() with an empty path. + """ + response = self.client.get("/no_trailing_slash_external_redirect/", follow=True) + self.assertRedirects(response, "https://testserver") + + def test_external_redirect_with_fetch_error_msg(self): + """ + assertRedirects without fetch_redirect_response=False raises + a relevant ValueError rather than a non-descript AssertionError. 
+ """ + response = self.client.get("/django_project_redirect/") + msg = ( + "The test client is unable to fetch remote URLs (got " + "https://www.djangoproject.com/). If the host is served by Django, " + "add 'www.djangoproject.com' to ALLOWED_HOSTS. " + "Otherwise, use assertRedirects(..., fetch_redirect_response=False)." + ) + with self.assertRaisesMessage(ValueError, msg): + self.assertRedirects(response, "https://www.djangoproject.com/") + + def test_session_modifying_view(self): + "Request a page that modifies the session" + # Session value isn't set initially + with self.assertRaises(KeyError): + self.client.session["tobacconist"] + + self.client.post("/session_view/") + # The session was modified + self.assertEqual(self.client.session["tobacconist"], "hovercraft") + + @override_settings( + INSTALLED_APPS=[], + SESSION_ENGINE="django.contrib.sessions.backends.file", + ) + def test_sessions_app_is_not_installed(self): + self.test_session_modifying_view() + + @override_settings( + INSTALLED_APPS=[], + SESSION_ENGINE="django.contrib.sessions.backends.nonexistent", + ) + def test_session_engine_is_invalid(self): + with self.assertRaisesMessage(ImportError, "nonexistent"): + self.test_session_modifying_view() + + def test_view_with_exception(self): + "Request a page that is known to throw an error" + with self.assertRaises(KeyError): + self.client.get("/broken_view/") + + def test_exc_info(self): + client = Client(raise_request_exception=False) + response = client.get("/broken_view/") + self.assertEqual(response.status_code, 500) + exc_type, exc_value, exc_traceback = response.exc_info + self.assertIs(exc_type, KeyError) + self.assertIsInstance(exc_value, KeyError) + self.assertEqual(str(exc_value), "'Oops! 
Looks like you wrote some bad code.'") + self.assertIsNotNone(exc_traceback) + + def test_exc_info_none(self): + response = self.client.get("/get_view/") + self.assertIsNone(response.exc_info) + + def test_mail_sending(self): + "Mail is redirected to a dummy outbox during test setup" + response = self.client.get("/mail_sending_view/") + self.assertEqual(response.status_code, 200) + + self.assertEqual(len(mail.outbox), 1) + self.assertEqual(mail.outbox[0].subject, "Test message") + self.assertEqual(mail.outbox[0].body, "This is a test email") + self.assertEqual(mail.outbox[0].from_email, "from@example.com") + self.assertEqual(mail.outbox[0].to[0], "first@example.com") + self.assertEqual(mail.outbox[0].to[1], "second@example.com") + + def test_reverse_lazy_decodes(self): + "reverse_lazy() works in the test client" + data = {"var": "data"} + response = self.client.get(reverse_lazy("get_view"), data) + + # Check some response details + self.assertContains(response, "This is a test") + + def test_relative_redirect(self): + response = self.client.get("/accounts/") + self.assertRedirects(response, "/accounts/login/") + + def test_relative_redirect_no_trailing_slash(self): + response = self.client.get("/accounts/no_trailing_slash") + self.assertRedirects(response, "/accounts/login/") + + def test_mass_mail_sending(self): + "Mass mail is redirected to a dummy outbox during test setup" + response = self.client.get("/mass_mail_sending_view/") + self.assertEqual(response.status_code, 200) + + self.assertEqual(len(mail.outbox), 2) + self.assertEqual(mail.outbox[0].subject, "First Test message") + self.assertEqual(mail.outbox[0].body, "This is the first test email") + self.assertEqual(mail.outbox[0].from_email, "from@example.com") + self.assertEqual(mail.outbox[0].to[0], "first@example.com") + self.assertEqual(mail.outbox[0].to[1], "second@example.com") + + self.assertEqual(mail.outbox[1].subject, "Second Test message") + self.assertEqual(mail.outbox[1].body, "This is the second 
test email") + self.assertEqual(mail.outbox[1].from_email, "from@example.com") + self.assertEqual(mail.outbox[1].to[0], "second@example.com") + self.assertEqual(mail.outbox[1].to[1], "third@example.com") + + def test_exception_following_nested_client_request(self): + """ + A nested test client request shouldn't clobber exception signals from + the outer client request. + """ + with self.assertRaisesMessage(Exception, "exception message"): + self.client.get("/nesting_exception_view/") + + def test_response_raises_multi_arg_exception(self): + """A request may raise an exception with more than one required arg.""" + with self.assertRaises(TwoArgException) as cm: + self.client.get("/two_arg_exception/") + self.assertEqual(cm.exception.args, ("one", "two")) + + def test_uploading_temp_file(self): + with tempfile.TemporaryFile() as test_file: + response = self.client.post("/upload_view/", data={"temp_file": test_file}) + self.assertEqual(response.content, b"temp_file") + + def test_uploading_named_temp_file(self): + with tempfile.NamedTemporaryFile() as test_file: + response = self.client.post( + "/upload_view/", + data={"named_temp_file": test_file}, + ) + self.assertEqual(response.content, b"named_temp_file") + + +@override_settings( + MIDDLEWARE=["django.middleware.csrf.CsrfViewMiddleware"], + ROOT_URLCONF="test_client.urls", +) +class CSRFEnabledClientTests(SimpleTestCase): + def test_csrf_enabled_client(self): + "A client can be instantiated with CSRF checks enabled" + csrf_client = Client(enforce_csrf_checks=True) + # The normal client allows the post + response = self.client.post("/post_view/", {}) + self.assertEqual(response.status_code, 200) + # The CSRF-enabled client rejects it + response = csrf_client.post("/post_view/", {}) + self.assertEqual(response.status_code, 403) + + +class CustomTestClient(Client): + i_am_customized = "Yes" + + +class CustomTestClientTest(SimpleTestCase): + client_class = CustomTestClient + + def test_custom_test_client(self): + """A 
test case can specify a custom class for self.client.""" + self.assertIs(hasattr(self.client, "i_am_customized"), True) + + +def _generic_view(request): + return HttpResponse(status=200) + + +@override_settings(ROOT_URLCONF="test_client.urls") +class RequestFactoryTest(SimpleTestCase): + """Tests for the request factory.""" + + # A mapping between names of HTTP/1.1 methods and their test views. + http_methods_and_views = ( + ("get", get_view), + ("post", post_view), + ("put", _generic_view), + ("patch", _generic_view), + ("delete", _generic_view), + ("head", _generic_view), + ("options", _generic_view), + ("trace", trace_view), + ) + request_factory = RequestFactory() + + def test_request_factory(self): + """The request factory implements all the HTTP/1.1 methods.""" + for method_name, view in self.http_methods_and_views: + method = getattr(self.request_factory, method_name) + request = method("/somewhere/") + response = view(request) + self.assertEqual(response.status_code, 200) + + def test_get_request_from_factory(self): + """ + The request factory returns a templated response for a GET request. 
+ """ + request = self.request_factory.get("/somewhere/") + response = get_view(request) + self.assertContains(response, "This is a test") + + def test_trace_request_from_factory(self): + """The request factory returns an echo response for a TRACE request.""" + url_path = "/somewhere/" + request = self.request_factory.trace(url_path) + response = trace_view(request) + protocol = request.META["SERVER_PROTOCOL"] + echoed_request_line = "TRACE {} {}".format(url_path, protocol) + self.assertContains(response, echoed_request_line) + + def test_request_factory_default_headers(self): + request = RequestFactory( + headers={ + "authorization": "Bearer faketoken", + "x-another-header": "some other value", + } + ).get("/somewhere/") + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + request = RequestFactory( + headers={ + "Authorization": "Bearer faketoken", + "X-Another-Header": "some other value", + } + ).get("/somewhere/") + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + def test_request_factory_sets_headers(self): + for method_name, view in self.http_methods_and_views: + method = getattr(self.request_factory, method_name) + request = method( + "/somewhere/", + headers={ + "authorization": "Bearer faketoken", + "x-another-header": "some other value", + }, + ) + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + request = method( + "/somewhere/", + headers={ 
+ "Authorization": "Bearer faketoken", + "X-Another-Header": "some other value", + }, + ) + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + +@override_settings(ROOT_URLCONF="test_client.urls") +class AsyncClientTest(TestCase): + async def test_response_resolver_match(self): + response = await self.async_client.get("/async_get_view/") + self.assertTrue(hasattr(response, "resolver_match")) + self.assertEqual(response.resolver_match.url_name, "async_get_view") + + @modify_settings( + MIDDLEWARE={"prepend": "test_client.tests.async_middleware_urlconf"}, + ) + async def test_response_resolver_match_middleware_urlconf(self): + response = await self.async_client.get("/middleware_urlconf_view/") + self.assertEqual(response.resolver_match.url_name, "middleware_urlconf_view") + + async def test_follow_parameter_not_implemented(self): + msg = "AsyncClient request methods do not accept the follow parameter." + tests = ( + "get", + "post", + "put", + "patch", + "delete", + "head", + "options", + "trace", + ) + for method_name in tests: + with self.subTest(method=method_name): + method = getattr(self.async_client, method_name) + with self.assertRaisesMessage(NotImplementedError, msg): + await method("/redirect_view/", follow=True) + + async def test_get_data(self): + response = await self.async_client.get("/get_view/", {"var": "val"}) + self.assertContains(response, "This is a test. 
val is the value.") + + async def test_post_data(self): + response = await self.async_client.post("/post_view/", {"value": 37}) + self.assertContains(response, "Data received: 37 is the value.") + + async def test_body_read_on_get_data(self): + response = await self.async_client.get("/post_view/") + self.assertContains(response, "Viewing GET page.") + + +@override_settings(ROOT_URLCONF="test_client.urls") +class AsyncRequestFactoryTest(SimpleTestCase): + request_factory = AsyncRequestFactory() + + async def test_request_factory(self): + tests = ( + "get", + "post", + "put", + "patch", + "delete", + "head", + "options", + "trace", + ) + for method_name in tests: + with self.subTest(method=method_name): + + async def async_generic_view(request): + if request.method.lower() != method_name: + return HttpResponseNotAllowed(method_name) + return HttpResponse(status=200) + + method = getattr(self.request_factory, method_name) + request = method("/somewhere/") + response = await async_generic_view(request) + self.assertEqual(response.status_code, 200) + + async def test_request_factory_data(self): + async def async_generic_view(request): + return HttpResponse(status=200, content=request.body) + + request = self.request_factory.post( + "/somewhere/", + data={"example": "data"}, + content_type="application/json", + ) + self.assertEqual(request.headers["content-length"], "19") + self.assertEqual(request.headers["content-type"], "application/json") + response = await async_generic_view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b'{"example": "data"}') + + async def test_request_limited_read(self): + tests = ["GET", "POST"] + for method in tests: + with self.subTest(method=method): + request = self.request_factory.generic( + method, + "/somewhere", + ) + self.assertEqual(request.read(200), b"") + + def test_request_factory_sets_headers(self): + request = self.request_factory.get( + "/somewhere/", + AUTHORIZATION="Bearer 
faketoken", + X_ANOTHER_HEADER="some other value", + ) + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + request = self.request_factory.get( + "/somewhere/", + headers={ + "Authorization": "Bearer faketoken", + "X-Another-Header": "some other value", + }, + ) + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + def test_request_factory_query_string(self): + request = self.request_factory.get("/somewhere/", {"example": "data"}) + self.assertNotIn("Query-String", request.headers) + self.assertEqual(request.GET["example"], "data") diff --git a/testbed/django__django/tests/test_client/urls.py b/testbed/django__django/tests/test_client/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..228e6c6a78c5198c2864e9c6febf5ff58d4f0690 --- /dev/null +++ b/testbed/django__django/tests/test_client/urls.py @@ -0,0 +1,79 @@ +from django.contrib.auth import views as auth_views +from django.urls import path +from django.views.generic import RedirectView + +from . 
import views

urlpatterns = [
    # Basic per-method views.
    path("upload_view/", views.upload_view, name="upload_view"),
    path("get_view/", views.get_view, name="get_view"),
    path("cbv_view/", views.CBView.as_view()),
    path("post_view/", views.post_view),
    path("post_then_get_view/", views.post_then_get_view),
    path("put_view/", views.put_view),
    path("trace_view/", views.trace_view),
    path("header_view/", views.view_with_header),
    path("raw_post_view/", views.raw_post_view),
    # Redirect behavior, including method-preserving 307/308 redirects.
    path("redirect_view/", views.redirect_view),
    path("redirect_view_307/", views.method_saving_307_redirect_view),
    path(
        "redirect_view_307_query_string/",
        views.method_saving_307_redirect_query_string_view,
    ),
    path("redirect_view_308/", views.method_saving_308_redirect_view),
    path(
        "redirect_view_308_query_string/",
        views.method_saving_308_redirect_query_string_view,
    ),
    path("secure_view/", views.view_with_secure),
    path(
        "permanent_redirect_view/",
        RedirectView.as_view(url="/get_view/", permanent=True),
    ),
    path(
        "temporary_redirect_view/",
        RedirectView.as_view(url="/get_view/", permanent=False),
    ),
    path("http_redirect_view/", RedirectView.as_view(url="/secure_view/")),
    path(
        "https_redirect_view/",
        RedirectView.as_view(url="https://testserver/secure_view/"),
    ),
    path("double_redirect_view/", views.double_redirect_view),
    path("bad_view/", views.bad_view),
    # Forms, JSON, and login/permission-protected views.
    path("form_view/", views.form_view),
    path("form_view_with_template/", views.form_view_with_template),
    path("json_view/", views.json_view),
    path("login_protected_view/", views.login_protected_view),
    path("login_protected_method_view/", views.login_protected_method_view),
    path(
        "login_protected_view_custom_redirect/",
        views.login_protected_view_changed_redirect,
    ),
    path("permission_protected_view/", views.permission_protected_view),
    path(
        "permission_protected_view_exception/",
        views.permission_protected_view_exception,
    ),
    path("permission_protected_method_view/", views.permission_protected_method_view),
    # Session, error, and mail-sending views.
    path("session_view/", views.session_view),
    path("broken_view/", views.broken_view),
    path("mail_sending_view/", views.mail_sending_view),
    path("mass_mail_sending_view/", views.mass_mail_sending_view),
    path("nesting_exception_view/", views.nesting_exception_view),
    path("django_project_redirect/", views.django_project_redirect),
    path(
        "no_trailing_slash_external_redirect/",
        views.no_trailing_slash_external_redirect,
    ),
    # Target for no_trailing_slash_external_redirect/ with follow=True.
    path("", views.index_view, name="index"),
    path("two_arg_exception/", views.two_arg_exception),
    # Authentication URLs.
    path("accounts/", RedirectView.as_view(url="login/")),
    path("accounts/no_trailing_slash", RedirectView.as_view(url="login/")),
    path("accounts/login/", auth_views.LoginView.as_view(template_name="login.html")),
    path("accounts/logout/", auth_views.LogoutView.as_view()),
    # Async views.
    path("async_get_view/", views.async_get_view, name="async_get_view"),
]

# -- tests/test_client/urls_middleware_urlconf.py ------------------------
from django.http import HttpResponse
from django.urls import path


def empty_response(request):
    """Return an empty 200 response; routed by middleware-set URLconfs."""
    return HttpResponse()


urlpatterns = [
    path("middleware_urlconf_view/", empty_response, name="middleware_urlconf_view"),
]
import json
from urllib.parse import urlencode
from xml.dom.minidom import parseString

from django.contrib.auth.decorators import login_required, permission_required
from django.core import mail
from django.core.exceptions import ValidationError
from django.forms import fields
from django.forms.forms import Form
from django.http import (
    HttpResponse,
    HttpResponseBadRequest,
    HttpResponseNotAllowed,
    HttpResponseNotFound,
    HttpResponseRedirect,
)
from django.shortcuts import render
from django.template import Context, Template
from django.test import Client
from django.utils.decorators import method_decorator
from django.views.generic import TemplateView


def get_view(request):
    """Expect a GET request and render a template; ``var`` defaults to 42."""
    template = Template("This is a test. {{ var }} is the value.", name="GET Template")
    context = Context({"var": request.GET.get("var", 42)})
    return HttpResponse(template.render(context))


async def async_get_view(request):
    """Minimal async view returning a fixed byte payload."""
    return HttpResponse(b"GET content.")


def trace_view(request):
    """
    Echo the request line of a TRACE request.

    TRACE requests should not have an entity; respond with a 400 status if
    one is present.
    """
    if request.method.upper() != "TRACE":
        return HttpResponseNotAllowed("TRACE")
    if request.body:
        return HttpResponseBadRequest("TRACE requests MUST NOT include an entity")
    template = Template("{{ method }} {{ uri }} {{ version }}", name="TRACE Template")
    context = Context(
        {
            "method": request.method,
            "uri": request.path,
            "version": request.META["SERVER_PROTOCOL"],
        }
    )
    return HttpResponse(template.render(context))


def put_view(request):
    """Render the request body for PUT; a plain page for anything else."""
    if request.method == "PUT":
        template = Template(
            "Data received: {{ data }} is the body.", name="PUT Template"
        )
        context = Context(
            {
                "Content-Length": request.META["CONTENT_LENGTH"],
                "data": request.body.decode(),
            }
        )
    else:
        template = Template("Viewing GET page.", name="Empty GET Template")
        context = Context()
    return HttpResponse(template.render(context))


def post_view(request):
    """
    Expect a POST and render a different template depending on whether any
    POST data is available; GETs render a plain page after draining the body.
    """
    if request.method == "POST":
        if request.POST:
            template = Template(
                "Data received: {{ data }} is the value.", name="POST Template"
            )
            context = Context({"data": request.POST["value"]})
        else:
            template = Template("Viewing POST page.", name="Empty POST Template")
            context = Context()
    else:
        template = Template("Viewing GET page.", name="Empty GET Template")
        # Used by test_body_read_on_get_data.
        request.read(200)
        context = Context()
    return HttpResponse(template.render(context))


def post_then_get_view(request):
    """
    Redirect POSTs back to this view with only a ``?success=true``
    querystring; render the value of that querystring on GET.
    """
    if request.method == "POST":
        return HttpResponseRedirect("?success=true")
    template = Template("The value of success is {{ value }}.", name="GET Template")
    context = Context({"value": request.GET.get("success", "false")})
    return HttpResponse(template.render(context))


def json_view(request):
    """
    Deserialize a JSON request body into the template context; requests
    without an application/json content type get an empty response.
    """
    if request.META.get("CONTENT_TYPE") != "application/json":
        return HttpResponse()
    template = Template("Viewing {} page. With data {{ data }}.".format(request.method))
    context = Context({"data": json.loads(request.body.decode("utf-8"))})
    return HttpResponse(template.render(context))


def view_with_header(request):
    """Attach a custom header to an otherwise empty response."""
    response = HttpResponse()
    response.headers["X-DJANGO-TEST"] = "Slartibartfast"
    return response


def raw_post_view(request):
    """Parse raw XML POST data and render the title/author it contains."""
    if request.method == "POST":
        document = parseString(request.body)
        first_book = document.firstChild.firstChild
        title, author = (node.firstChild.nodeValue for node in first_book.childNodes)
        template = Template("{{ title }} - {{ author }}", name="Book template")
        context = Context({"title": title, "author": author})
    else:
        template = Template("GET request.", name="Book GET template")
        context = Context()
    return HttpResponse(template.render(context))


def redirect_view(request):
    """Redirect every request to the GET view, preserving any querystring."""
    query = "?" + urlencode(request.GET, True) if request.GET else ""
    return HttpResponseRedirect("/get_view/" + query)


def method_saving_307_redirect_query_string_view(request):
    """307 (method-preserving) redirect that carries a querystring."""
    return HttpResponseRedirect("/post_view/?hello=world", status=307)


def method_saving_308_redirect_query_string_view(request):
    """308 (method-preserving) redirect that carries a querystring."""
    return HttpResponseRedirect("/post_view/?hello=world", status=308)


def _post_view_redirect(request, status_code):
    """Redirect to ``?to=`` (default /post_view/) using the given status."""
    destination = request.GET.get("to", "/post_view/")
    return HttpResponseRedirect(destination, status=status_code)


def method_saving_307_redirect_view(request):
    return _post_view_redirect(request, 307)


def method_saving_308_redirect_view(request):
    return _post_view_redirect(request, 308)


def view_with_secure(request):
    """Report whether the request was made over HTTPS, and the server port."""
    response = HttpResponse()
    response.test_was_secure_request = request.is_secure()
    response.test_server_port = request.META.get("SERVER_PORT", 80)
    return response


def double_redirect_view(request):
    """First hop of a two-step redirect chain."""
    return HttpResponseRedirect("/permanent_redirect_view/")


def bad_view(request):
    """Return a 404 whose body contains recognizable error content."""
    return HttpResponseNotFound("Not found!. This page contains some MAGIC content")
TestChoices = (
    ("a", "First Choice"),
    ("b", "Second Choice"),
    ("c", "Third Choice"),
    ("d", "Fourth Choice"),
    ("e", "Fifth Choice"),
)


class TestForm(Form):
    """Form with one field of each common kind, used by the form views."""

    text = fields.CharField()
    email = fields.EmailField()
    value = fields.IntegerField()
    single = fields.ChoiceField(choices=TestChoices)
    multi = fields.MultipleChoiceField(choices=TestChoices)

    def clean(self):
        cleaned_data = self.cleaned_data
        # A sentinel value lets tests trigger a non-field error on demand.
        if cleaned_data.get("text") == "Raise non-field error":
            raise ValidationError("Non-field error.")
        return cleaned_data


def form_view(request):
    """Validate TestForm on POST; render a GET-bound form otherwise."""
    if request.method == "POST":
        form = TestForm(request.POST)
        if form.is_valid():
            template = Template("Valid POST data.", name="Valid POST Template")
            context = Context()
        else:
            template = Template(
                "Invalid POST data. {{ form.errors }}", name="Invalid POST Template"
            )
            context = Context({"form": form})
    else:
        form = TestForm(request.GET)
        template = Template("Viewing base form. {{ form }}.", name="Form GET Template")
        context = Context({"form": form})
    return HttpResponse(template.render(context))


def form_view_with_template(request):
    """Like form_view, but rendered through an on-disk template."""
    if request.method == "POST":
        form = TestForm(request.POST)
        message = "POST data OK" if form.is_valid() else "POST data has errors"
    else:
        form = TestForm()
        message = "GET form page"
    return render(request, "form_view.html", {"form": form, "message": message})


@login_required
def login_protected_view(request):
    """Render the current username; requires an authenticated user."""
    template = Template(
        "This is a login protected test. Username is {{ user.username }}.",
        name="Login Template",
    )
    return HttpResponse(template.render(Context({"user": request.user})))


@login_required(redirect_field_name="redirect_to")
def login_protected_view_changed_redirect(request):
    """Login-protected view using a custom redirect field name."""
    template = Template(
        "This is a login protected test. Username is {{ user.username }}.",
        name="Login Template",
    )
    return HttpResponse(template.render(Context({"user": request.user})))


def _permission_protected_view(request):
    """Render the user's name and permissions; wrapped with decorators below."""
    template = Template(
        "This is a permission protected test. "
        "Username is {{ user.username }}. "
        "Permissions are {{ user.get_all_permissions }}.",
        name="Permissions Template",
    )
    return HttpResponse(template.render(Context({"user": request.user})))


# The same view guarded two ways: redirect-to-login vs. raise PermissionDenied.
permission_protected_view = permission_required("permission_not_granted")(
    _permission_protected_view
)
permission_protected_view_exception = permission_required(
    "permission_not_granted", raise_exception=True
)(_permission_protected_view)


class _ViewManager:
    """Holds method-based views to exercise method_decorator()."""

    @method_decorator(login_required)
    def login_protected_view(self, request):
        template = Template(
            "This is a login protected test using a method. "
            "Username is {{ user.username }}.",
            name="Login Method Template",
        )
        return HttpResponse(template.render(Context({"user": request.user})))

    @method_decorator(permission_required("permission_not_granted"))
    def permission_protected_view(self, request):
        template = Template(
            "This is a permission protected test using a method. "
            "Username is {{ user.username }}. "
            "Permissions are {{ user.get_all_permissions }}.",
            name="Permissions Template",
        )
        return HttpResponse(template.render(Context({"user": request.user})))


_view_manager = _ViewManager()
login_protected_method_view = _view_manager.login_protected_view
permission_protected_method_view = _view_manager.permission_protected_view


def session_view(request):
    """Write a value into the session and render a confirmation page."""
    request.session["tobacconist"] = "hovercraft"
    template = Template(
        "This is a view that modifies the session.",
        name="Session Modifying View Template",
    )
    return HttpResponse(template.render(Context()))


def broken_view(request):
    """Raise an exception, simulating a broken view."""
    raise KeyError("Oops! Looks like you wrote some bad code.")


def mail_sending_view(request):
    """Send a single test email."""
    mail.EmailMessage(
        "Test message",
        "This is a test email",
        "from@example.com",
        ["first@example.com", "second@example.com"],
    ).send()
    return HttpResponse("Mail sent")


def mass_mail_sending_view(request):
    """Send two test emails over a single connection."""
    first = mail.EmailMessage(
        "First Test message",
        "This is the first test email",
        "from@example.com",
        ["first@example.com", "second@example.com"],
    )
    second = mail.EmailMessage(
        "Second Test message",
        "This is the second test email",
        "from@example.com",
        ["second@example.com", "third@example.com"],
    )
    mail.get_connection().send_messages([first, second])
    return HttpResponse("Mail sent")


def nesting_exception_view(request):
    """
    Use a nested test client to call another view, then raise an exception.
    """
    client = Client()
    client.get("/get_view/")
    raise Exception("exception message")


def django_project_redirect(request):
    """Redirect to an external domain."""
    return HttpResponseRedirect("https://www.djangoproject.com/")


def no_trailing_slash_external_redirect(request):
    """
    RFC 3986 Section 6.2.3: Empty path should be normalized to "/".

    Use https://testserver, rather than an external domain, in order to allow
    use of follow=True, triggering Client._handle_redirects().
    """
    return HttpResponseRedirect("https://testserver")
def index_view(request):
    """Target for no_trailing_slash_external_redirect with follow=True."""
    return HttpResponse("Hello world")


def upload_view(request):
    """List the keys of request.FILES in the response body."""
    return HttpResponse(", ".join(request.FILES))


class TwoArgException(Exception):
    """Exception whose constructor requires two positional arguments."""

    def __init__(self, one, two):
        pass


def two_arg_exception(request):
    """Raise an exception that cannot be re-raised from a single argument."""
    raise TwoArgException("one", "two")


class CBView(TemplateView):
    """Minimal class-based view rendering base.html."""

    template_name = "base.html"


# -- tests/test_client_regress/auth_backends.py --------------------------
from django.contrib.auth.backends import ModelBackend

from .models import CustomUser


class CustomUserBackend(ModelBackend):
    """Authentication backend for the CustomUser model."""

    def authenticate(self, request, username=None, password=None):
        # Look the user up by natural key; a wrong password falls through
        # and returns None implicitly, just like a failed lookup.
        try:
            user = CustomUser.custom_objects.get_by_natural_key(username)
        except CustomUser.DoesNotExist:
            return None
        if user.check_password(password):
            return user

    def get_user(self, user_id):
        try:
            return CustomUser.custom_objects.get(pk=user_id)
        except CustomUser.DoesNotExist:
            return None
# -- tests/test_client_regress/context_processors.py ---------------------


def special(request):
    """Expose ``request.special_path`` (set by middleware) to templates."""
    return {"path": request.special_path}


# -- tests/test_client_regress/models.py ---------------------------------
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
from django.db import models


class CustomUser(AbstractBaseUser):
    """Minimal custom user model identified by email address."""

    email = models.EmailField(verbose_name="email address", max_length=255, unique=True)
    custom_objects = BaseUserManager()

    USERNAME_FIELD = "email"

    class Meta:
        app_label = "test_client_regress"


# -- tests/test_client_regress/session.py --------------------------------
from django.contrib.sessions.backends.base import SessionBase


class SessionStore(SessionBase):
    """
    A simple cookie-based session storage implementation.

    The session key is actually the session data, pickled and encoded.
    This means that saving the session will change the session key.
    """

    # NOTE: the former __init__ override only forwarded session_key to
    # SessionBase.__init__ with the same signature; it was redundant and
    # has been removed.

    def exists(self, session_key):
        # Keys are self-contained, so nothing is ever stored server-side.
        return False

    def create(self):
        self._session_key = self.encode({})

    def save(self, must_create=False):
        self._session_key = self.encode(self._session)

    def delete(self, session_key=None):
        # The argument is accepted for API compatibility but ignored;
        # deleting just resets the key to an empty session.
        self._session_key = self.encode({})

    def load(self):
        try:
            return self.decode(self.session_key)
        except Exception:
            # Any decoding failure means the cookie was tampered with or
            # stale; mark the session modified and start fresh.
            self.modified = True
            return {}
+""" +import itertools +import os + +from django.contrib.auth.models import User +from django.contrib.auth.signals import user_logged_in, user_logged_out +from django.http import HttpResponse +from django.template import Context, RequestContext, TemplateSyntaxError, engines +from django.template.response import SimpleTemplateResponse +from django.test import ( + Client, + SimpleTestCase, + TestCase, + modify_settings, + override_settings, +) +from django.test.client import RedirectCycleError, RequestFactory, encode_file +from django.test.utils import ContextList +from django.urls import NoReverseMatch, reverse +from django.utils.translation import gettext_lazy + +from .models import CustomUser +from .views import CustomTestException + + +class TestDataMixin: + @classmethod + def setUpTestData(cls): + cls.u1 = User.objects.create_user(username="testclient", password="password") + cls.staff = User.objects.create_user( + username="staff", password="password", is_staff=True + ) + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class AssertContainsTests(SimpleTestCase): + def test_contains(self): + "Responses can be inspected for content, including counting repeated substrings" + response = self.client.get("/no_template_view/") + + self.assertNotContains(response, "never") + self.assertContains(response, "never", 0) + self.assertContains(response, "once") + self.assertContains(response, "once", 1) + self.assertContains(response, "twice") + self.assertContains(response, "twice", 2) + + try: + self.assertContains(response, "text", status_code=999) + except AssertionError as e: + self.assertIn( + "Couldn't retrieve content: Response code was 200 (expected 999)", + str(e), + ) + try: + self.assertContains(response, "text", status_code=999, msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Couldn't retrieve content: Response code was 200 (expected 999)", + str(e), + ) + + try: + self.assertNotContains(response, "text", status_code=999) 
+ except AssertionError as e: + self.assertIn( + "Couldn't retrieve content: Response code was 200 (expected 999)", + str(e), + ) + try: + self.assertNotContains(response, "text", status_code=999, msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Couldn't retrieve content: Response code was 200 (expected 999)", + str(e), + ) + + try: + self.assertNotContains(response, "once") + except AssertionError as e: + self.assertIn("Response should not contain 'once'", str(e)) + try: + self.assertNotContains(response, "once", msg_prefix="abc") + except AssertionError as e: + self.assertIn("abc: Response should not contain 'once'", str(e)) + + try: + self.assertContains(response, "never", 1) + except AssertionError as e: + self.assertIn( + "Found 0 instances of 'never' in response (expected 1)", str(e) + ) + try: + self.assertContains(response, "never", 1, msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Found 0 instances of 'never' in response (expected 1)", str(e) + ) + + try: + self.assertContains(response, "once", 0) + except AssertionError as e: + self.assertIn( + "Found 1 instances of 'once' in response (expected 0)", str(e) + ) + try: + self.assertContains(response, "once", 0, msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Found 1 instances of 'once' in response (expected 0)", str(e) + ) + + try: + self.assertContains(response, "once", 2) + except AssertionError as e: + self.assertIn( + "Found 1 instances of 'once' in response (expected 2)", str(e) + ) + try: + self.assertContains(response, "once", 2, msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Found 1 instances of 'once' in response (expected 2)", str(e) + ) + + try: + self.assertContains(response, "twice", 1) + except AssertionError as e: + self.assertIn( + "Found 2 instances of 'twice' in response (expected 1)", str(e) + ) + try: + self.assertContains(response, "twice", 1, msg_prefix="abc") + except AssertionError as 
e: + self.assertIn( + "abc: Found 2 instances of 'twice' in response (expected 1)", str(e) + ) + + try: + self.assertContains(response, "thrice") + except AssertionError as e: + self.assertIn("Couldn't find 'thrice' in response", str(e)) + try: + self.assertContains(response, "thrice", msg_prefix="abc") + except AssertionError as e: + self.assertIn("abc: Couldn't find 'thrice' in response", str(e)) + + try: + self.assertContains(response, "thrice", 3) + except AssertionError as e: + self.assertIn( + "Found 0 instances of 'thrice' in response (expected 3)", str(e) + ) + try: + self.assertContains(response, "thrice", 3, msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Found 0 instances of 'thrice' in response (expected 3)", str(e) + ) + + def test_unicode_contains(self): + "Unicode characters can be found in template context" + # Regression test for #10183 + r = self.client.get("/check_unicode/") + self.assertContains(r, "さかき") + self.assertContains(r, b"\xe5\xb3\xa0".decode()) + + def test_unicode_not_contains(self): + "Unicode characters can be searched for, and not found in template context" + # Regression test for #10183 + r = self.client.get("/check_unicode/") + self.assertNotContains(r, "はたけ") + self.assertNotContains(r, b"\xe3\x81\xaf\xe3\x81\x9f\xe3\x81\x91".decode()) + + def test_binary_contains(self): + r = self.client.get("/check_binary/") + self.assertContains(r, b"%PDF-1.4\r\n%\x93\x8c\x8b\x9e") + with self.assertRaises(AssertionError): + self.assertContains(r, b"%PDF-1.4\r\n%\x93\x8c\x8b\x9e", count=2) + + def test_binary_not_contains(self): + r = self.client.get("/check_binary/") + self.assertNotContains(r, b"%ODF-1.4\r\n%\x93\x8c\x8b\x9e") + with self.assertRaises(AssertionError): + self.assertNotContains(r, b"%PDF-1.4\r\n%\x93\x8c\x8b\x9e") + + def test_nontext_contains(self): + r = self.client.get("/no_template_view/") + self.assertContains(r, gettext_lazy("once")) + + def test_nontext_not_contains(self): + r = 
self.client.get("/no_template_view/") + self.assertNotContains(r, gettext_lazy("never")) + + def test_assert_contains_renders_template_response(self): + """ + An unrendered SimpleTemplateResponse may be used in assertContains(). + """ + template = engines["django"].from_string("Hello") + response = SimpleTemplateResponse(template) + self.assertContains(response, "Hello") + + def test_assert_contains_using_non_template_response(self): + """auto-rendering does not affect responses that aren't + instances (or subclasses) of SimpleTemplateResponse. + Refs #15826. + """ + response = HttpResponse("Hello") + self.assertContains(response, "Hello") + + def test_assert_not_contains_renders_template_response(self): + """ + An unrendered SimpleTemplateResponse may be used in assertNotContains(). + """ + template = engines["django"].from_string("Hello") + response = SimpleTemplateResponse(template) + self.assertNotContains(response, "Bye") + + def test_assert_not_contains_using_non_template_response(self): + """ + auto-rendering does not affect responses that aren't instances (or + subclasses) of SimpleTemplateResponse. 
+ """ + response = HttpResponse("Hello") + self.assertNotContains(response, "Bye") + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class AssertTemplateUsedTests(TestDataMixin, TestCase): + def test_no_context(self): + "Template usage assertions work then templates aren't in use" + response = self.client.get("/no_template_view/") + + # The no template case doesn't mess with the template assertions + self.assertTemplateNotUsed(response, "GET Template") + + try: + self.assertTemplateUsed(response, "GET Template") + except AssertionError as e: + self.assertIn("No templates used to render the response", str(e)) + + try: + self.assertTemplateUsed(response, "GET Template", msg_prefix="abc") + except AssertionError as e: + self.assertIn("abc: No templates used to render the response", str(e)) + + msg = "No templates used to render the response" + with self.assertRaisesMessage(AssertionError, msg): + self.assertTemplateUsed(response, "GET Template", count=2) + + def test_single_context(self): + "Template assertions work when there is a single context" + response = self.client.get("/post_view/", {}) + msg = ( + ": Template 'Empty GET Template' was used unexpectedly in " + "rendering the response" + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertTemplateNotUsed(response, "Empty GET Template") + with self.assertRaisesMessage(AssertionError, "abc" + msg): + self.assertTemplateNotUsed(response, "Empty GET Template", msg_prefix="abc") + msg = ( + ": Template 'Empty POST Template' was not a template used to " + "render the response. 
Actual template(s) used: Empty GET Template" + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertTemplateUsed(response, "Empty POST Template") + with self.assertRaisesMessage(AssertionError, "abc" + msg): + self.assertTemplateUsed(response, "Empty POST Template", msg_prefix="abc") + msg = ( + ": Template 'Empty GET Template' was expected to be rendered 2 " + "time(s) but was actually rendered 1 time(s)." + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertTemplateUsed(response, "Empty GET Template", count=2) + with self.assertRaisesMessage(AssertionError, "abc" + msg): + self.assertTemplateUsed( + response, "Empty GET Template", msg_prefix="abc", count=2 + ) + + def test_multiple_context(self): + "Template assertions work when there are multiple contexts" + post_data = { + "text": "Hello World", + "email": "foo@example.com", + "value": 37, + "single": "b", + "multi": ("b", "c", "e"), + } + response = self.client.post("/form_view_with_template/", post_data) + self.assertContains(response, "POST data OK") + msg = "Template '%s' was used unexpectedly in rendering the response" + with self.assertRaisesMessage(AssertionError, msg % "form_view.html"): + self.assertTemplateNotUsed(response, "form_view.html") + with self.assertRaisesMessage(AssertionError, msg % "base.html"): + self.assertTemplateNotUsed(response, "base.html") + msg = ( + "Template 'Valid POST Template' was not a template used to render " + "the response. Actual template(s) used: form_view.html, base.html" + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertTemplateUsed(response, "Valid POST Template") + msg = ( + "Template 'base.html' was expected to be rendered 2 time(s) but " + "was actually rendered 1 time(s)." 
+ ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertTemplateUsed(response, "base.html", count=2) + + def test_template_rendered_multiple_times(self): + """Template assertions work when a template is rendered multiple times.""" + response = self.client.get("/render_template_multiple_times/") + + self.assertTemplateUsed(response, "base.html", count=2) + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class AssertRedirectsTests(SimpleTestCase): + def test_redirect_page(self): + "An assertion is raised if the original page couldn't be retrieved as expected" + # This page will redirect with code 301, not 302 + response = self.client.get("/permanent_redirect_view/") + try: + self.assertRedirects(response, "/get_view/") + except AssertionError as e: + self.assertIn( + "Response didn't redirect as expected: Response code was 301 " + "(expected 302)", + str(e), + ) + + try: + self.assertRedirects(response, "/get_view/", msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Response didn't redirect as expected: Response code was 301 " + "(expected 302)", + str(e), + ) + + def test_lost_query(self): + """ + An assertion is raised if the redirect location doesn't preserve GET + parameters. 
+ """ + response = self.client.get("/redirect_view/", {"var": "value"}) + try: + self.assertRedirects(response, "/get_view/") + except AssertionError as e: + self.assertIn( + "Response redirected to '/get_view/?var=value', expected '/get_view/'", + str(e), + ) + + try: + self.assertRedirects(response, "/get_view/", msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Response redirected to '/get_view/?var=value', expected " + "'/get_view/'", + str(e), + ) + + def test_incorrect_target(self): + "An assertion is raised if the response redirects to another target" + response = self.client.get("/permanent_redirect_view/") + try: + # Should redirect to get_view + self.assertRedirects(response, "/some_view/") + except AssertionError as e: + self.assertIn( + "Response didn't redirect as expected: Response code was 301 " + "(expected 302)", + str(e), + ) + + def test_target_page(self): + """ + An assertion is raised if the response redirect target cannot be + retrieved as expected. 
+ """ + response = self.client.get("/double_redirect_view/") + try: + # The redirect target responds with a 301 code, not 200 + self.assertRedirects(response, "http://testserver/permanent_redirect_view/") + except AssertionError as e: + self.assertIn( + "Couldn't retrieve redirection page '/permanent_redirect_view/': " + "response code was 301 (expected 200)", + str(e), + ) + + try: + # The redirect target responds with a 301 code, not 200 + self.assertRedirects( + response, "http://testserver/permanent_redirect_view/", msg_prefix="abc" + ) + except AssertionError as e: + self.assertIn( + "abc: Couldn't retrieve redirection page '/permanent_redirect_view/': " + "response code was 301 (expected 200)", + str(e), + ) + + def test_redirect_chain(self): + "You can follow a redirect chain of multiple redirects" + response = self.client.get("/redirects/further/more/", {}, follow=True) + self.assertRedirects( + response, "/no_template_view/", status_code=302, target_status_code=200 + ) + + self.assertEqual(len(response.redirect_chain), 1) + self.assertEqual(response.redirect_chain[0], ("/no_template_view/", 302)) + + def test_multiple_redirect_chain(self): + "You can follow a redirect chain of multiple redirects" + response = self.client.get("/redirects/", {}, follow=True) + self.assertRedirects( + response, "/no_template_view/", status_code=302, target_status_code=200 + ) + + self.assertEqual(len(response.redirect_chain), 3) + self.assertEqual(response.redirect_chain[0], ("/redirects/further/", 302)) + self.assertEqual(response.redirect_chain[1], ("/redirects/further/more/", 302)) + self.assertEqual(response.redirect_chain[2], ("/no_template_view/", 302)) + + def test_redirect_chain_to_non_existent(self): + "You can follow a chain to a nonexistent view." 
+ response = self.client.get("/redirect_to_non_existent_view2/", {}, follow=True) + self.assertRedirects( + response, "/non_existent_view/", status_code=302, target_status_code=404 + ) + + def test_redirect_chain_to_self(self): + "Redirections to self are caught and escaped" + with self.assertRaises(RedirectCycleError) as context: + self.client.get("/redirect_to_self/", {}, follow=True) + response = context.exception.last_response + # The chain of redirects stops once the cycle is detected. + self.assertRedirects( + response, "/redirect_to_self/", status_code=302, target_status_code=302 + ) + self.assertEqual(len(response.redirect_chain), 2) + + def test_redirect_to_self_with_changing_query(self): + "Redirections don't loop forever even if query is changing" + with self.assertRaises(RedirectCycleError): + self.client.get( + "/redirect_to_self_with_changing_query_view/", + {"counter": "0"}, + follow=True, + ) + + def test_circular_redirect(self): + "Circular redirect chains are caught and escaped" + with self.assertRaises(RedirectCycleError) as context: + self.client.get("/circular_redirect_1/", {}, follow=True) + response = context.exception.last_response + # The chain of redirects will get back to the starting point, but stop there. 
+ self.assertRedirects( + response, "/circular_redirect_2/", status_code=302, target_status_code=302 + ) + self.assertEqual(len(response.redirect_chain), 4) + + def test_redirect_chain_post(self): + "A redirect chain will be followed from an initial POST post" + response = self.client.post("/redirects/", {"nothing": "to_send"}, follow=True) + self.assertRedirects(response, "/no_template_view/", 302, 200) + self.assertEqual(len(response.redirect_chain), 3) + + def test_redirect_chain_head(self): + "A redirect chain will be followed from an initial HEAD request" + response = self.client.head("/redirects/", {"nothing": "to_send"}, follow=True) + self.assertRedirects(response, "/no_template_view/", 302, 200) + self.assertEqual(len(response.redirect_chain), 3) + + def test_redirect_chain_options(self): + "A redirect chain will be followed from an initial OPTIONS request" + response = self.client.options("/redirects/", follow=True) + self.assertRedirects(response, "/no_template_view/", 302, 200) + self.assertEqual(len(response.redirect_chain), 3) + + def test_redirect_chain_put(self): + "A redirect chain will be followed from an initial PUT request" + response = self.client.put("/redirects/", follow=True) + self.assertRedirects(response, "/no_template_view/", 302, 200) + self.assertEqual(len(response.redirect_chain), 3) + + def test_redirect_chain_delete(self): + "A redirect chain will be followed from an initial DELETE request" + response = self.client.delete("/redirects/", follow=True) + self.assertRedirects(response, "/no_template_view/", 302, 200) + self.assertEqual(len(response.redirect_chain), 3) + + @modify_settings(ALLOWED_HOSTS={"append": "otherserver"}) + def test_redirect_to_different_host(self): + "The test client will preserve scheme, host and port changes" + response = self.client.get("/redirect_other_host/", follow=True) + self.assertRedirects( + response, + "https://otherserver:8443/no_template_view/", + status_code=302, + target_status_code=200, + ) + # 
We can't use is_secure() or get_host() + # because response.request is a dictionary, not an HttpRequest + self.assertEqual(response.request.get("wsgi.url_scheme"), "https") + self.assertEqual(response.request.get("SERVER_NAME"), "otherserver") + self.assertEqual(response.request.get("SERVER_PORT"), "8443") + # assertRedirects() can follow redirect to 'otherserver' too. + response = self.client.get("/redirect_other_host/", follow=False) + self.assertRedirects( + response, + "https://otherserver:8443/no_template_view/", + status_code=302, + target_status_code=200, + ) + + def test_redirect_chain_on_non_redirect_page(self): + """ + An assertion is raised if the original page couldn't be retrieved as + expected. + """ + # This page will redirect with code 301, not 302 + response = self.client.get("/get_view/", follow=True) + try: + self.assertRedirects(response, "/get_view/") + except AssertionError as e: + self.assertIn( + "Response didn't redirect as expected: Response code was 200 " + "(expected 302)", + str(e), + ) + + try: + self.assertRedirects(response, "/get_view/", msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Response didn't redirect as expected: Response code was 200 " + "(expected 302)", + str(e), + ) + + def test_redirect_on_non_redirect_page(self): + "An assertion is raised if the original page couldn't be retrieved as expected" + # This page will redirect with code 301, not 302 + response = self.client.get("/get_view/") + try: + self.assertRedirects(response, "/get_view/") + except AssertionError as e: + self.assertIn( + "Response didn't redirect as expected: Response code was 200 " + "(expected 302)", + str(e), + ) + + try: + self.assertRedirects(response, "/get_view/", msg_prefix="abc") + except AssertionError as e: + self.assertIn( + "abc: Response didn't redirect as expected: Response code was 200 " + "(expected 302)", + str(e), + ) + + def test_redirect_scheme(self): + """ + An assertion is raised if the response doesn't 
have the scheme + specified in expected_url. + """ + + # For all possible True/False combinations of follow and secure + for follow, secure in itertools.product([True, False], repeat=2): + # always redirects to https + response = self.client.get( + "/https_redirect_view/", follow=follow, secure=secure + ) + # the goal scheme is https + self.assertRedirects( + response, "https://testserver/secure_view/", status_code=302 + ) + with self.assertRaises(AssertionError): + self.assertRedirects( + response, "http://testserver/secure_view/", status_code=302 + ) + + def test_redirect_fetch_redirect_response(self): + """Preserve extra headers of requests made with django.test.Client.""" + methods = ( + "get", + "post", + "head", + "options", + "put", + "patch", + "delete", + "trace", + ) + for method in methods: + with self.subTest(method=method): + req_method = getattr(self.client, method) + # HTTP_REDIRECT in "extra". + response = req_method( + "/redirect_based_on_extra_headers_1/", + follow=False, + HTTP_REDIRECT="val", + ) + self.assertRedirects( + response, + "/redirect_based_on_extra_headers_2/", + fetch_redirect_response=True, + status_code=302, + target_status_code=302, + ) + # HTTP_REDIRECT in "headers". + response = req_method( + "/redirect_based_on_extra_headers_1/", + follow=False, + headers={"redirect": "val"}, + ) + self.assertRedirects( + response, + "/redirect_based_on_extra_headers_2/", + fetch_redirect_response=True, + status_code=302, + target_status_code=302, + ) + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class LoginTests(TestDataMixin, TestCase): + def test_login_different_client(self): + "Using a different test client doesn't violate authentication" + + # Create a second client, and log in. + c = Client() + login = c.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Get a redirection page with the second client. 
+ response = c.get("/login_protected_redirect_view/") + + # At this points, the self.client isn't logged in. + # assertRedirects uses the original client, not the default client. + self.assertRedirects(response, "/get_view/") + + +@override_settings( + SESSION_ENGINE="test_client_regress.session", + ROOT_URLCONF="test_client_regress.urls", +) +class SessionEngineTests(TestDataMixin, TestCase): + def test_login(self): + "A session engine that modifies the session key can be used to log in" + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Try to access a login protected page. + response = self.client.get("/login_protected_view/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context["user"].username, "testclient") + + +@override_settings( + ROOT_URLCONF="test_client_regress.urls", +) +class URLEscapingTests(SimpleTestCase): + def test_simple_argument_get(self): + "Get a view that has a simple string argument" + response = self.client.get(reverse("arg_view", args=["Slartibartfast"])) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"Howdy, Slartibartfast") + + def test_argument_with_space_get(self): + "Get a view that has a string argument that requires escaping" + response = self.client.get(reverse("arg_view", args=["Arthur Dent"])) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"Hi, Arthur") + + def test_simple_argument_post(self): + "Post for a view that has a simple string argument" + response = self.client.post(reverse("arg_view", args=["Slartibartfast"])) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"Howdy, Slartibartfast") + + def test_argument_with_space_post(self): + "Post for a view that has a string argument that requires escaping" + response = self.client.post(reverse("arg_view", args=["Arthur Dent"])) + self.assertEqual(response.status_code, 200) 
+ self.assertEqual(response.content, b"Hi, Arthur") + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class ExceptionTests(TestDataMixin, TestCase): + def test_exception_cleared(self): + "#5836 - A stale user exception isn't re-raised by the test client." + + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + with self.assertRaises(CustomTestException): + self.client.get("/staff_only/") + + # At this point, an exception has been raised, and should be cleared. + + # This next operation should be successful; if it isn't we have a problem. + login = self.client.login(username="staff", password="password") + self.assertTrue(login, "Could not log in") + self.client.get("/staff_only/") + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class TemplateExceptionTests(SimpleTestCase): + @override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [os.path.join(os.path.dirname(__file__), "bad_templates")], + } + ] + ) + def test_bad_404_template(self): + "Errors found when rendering 404 error templates are re-raised" + with self.assertRaises(TemplateSyntaxError): + self.client.get("/no_such_view/") + + +# We need two different tests to check URLconf substitution - one to check +# it was changed, and another one (without self.urls) to check it was reverted on +# teardown. This pair of tests relies upon the alphabetical ordering of test execution. +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class UrlconfSubstitutionTests(SimpleTestCase): + def test_urlconf_was_changed(self): + "TestCase can enforce a custom URLconf on a per-test basis" + url = reverse("arg_view", args=["somename"]) + self.assertEqual(url, "/arg_view/somename/") + + +# This test needs to run *after* UrlconfSubstitutionTests; the zz prefix in the +# name is to ensure alphabetical ordering. 
+class zzUrlconfSubstitutionTests(SimpleTestCase): + def test_urlconf_was_reverted(self): + """URLconf is reverted to original value after modification in a TestCase + + This will not find a match as the default ROOT_URLCONF is empty. + """ + with self.assertRaises(NoReverseMatch): + reverse("arg_view", args=["somename"]) + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class ContextTests(TestDataMixin, TestCase): + def test_single_context(self): + "Context variables can be retrieved from a single context" + response = self.client.get("/request_data/", data={"foo": "whiz"}) + self.assertIsInstance(response.context, RequestContext) + self.assertIn("get-foo", response.context) + self.assertEqual(response.context["get-foo"], "whiz") + self.assertEqual(response.context["data"], "sausage") + + with self.assertRaisesMessage(KeyError, "does-not-exist"): + response.context["does-not-exist"] + + def test_inherited_context(self): + "Context variables can be retrieved from a list of contexts" + response = self.client.get("/request_data_extended/", data={"foo": "whiz"}) + self.assertEqual(response.context.__class__, ContextList) + self.assertEqual(len(response.context), 2) + self.assertIn("get-foo", response.context) + self.assertEqual(response.context["get-foo"], "whiz") + self.assertEqual(response.context["data"], "bacon") + + with self.assertRaisesMessage(KeyError, "does-not-exist"): + response.context["does-not-exist"] + + def test_contextlist_keys(self): + c1 = Context() + c1.update({"hello": "world", "goodbye": "john"}) + c1.update({"hello": "dolly", "dolly": "parton"}) + c2 = Context() + c2.update({"goodbye": "world", "python": "rocks"}) + c2.update({"goodbye": "dolly"}) + + k = ContextList([c1, c2]) + # None, True and False are builtins of BaseContext, and present + # in every Context without needing to be added. 
+ self.assertEqual( + {"None", "True", "False", "hello", "goodbye", "python", "dolly"}, k.keys() + ) + + def test_contextlist_get(self): + c1 = Context({"hello": "world", "goodbye": "john"}) + c2 = Context({"goodbye": "world", "python": "rocks"}) + k = ContextList([c1, c2]) + self.assertEqual(k.get("hello"), "world") + self.assertEqual(k.get("goodbye"), "john") + self.assertEqual(k.get("python"), "rocks") + self.assertEqual(k.get("nonexistent", "default"), "default") + + def test_15368(self): + # Need to insert a context processor that assumes certain things about + # the request instance. This triggers a bug caused by some ways of + # copying RequestContext. + with self.settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "test_client_regress.context_processors.special", + ], + }, + } + ] + ): + response = self.client.get("/request_context_view/") + self.assertContains(response, "Path: /request_context_view/") + + def test_nested_requests(self): + """ + response.context is not lost when view call another view. + """ + response = self.client.get("/nested_view/") + self.assertIsInstance(response.context, RequestContext) + self.assertEqual(response.context["nested"], "yes") + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class SessionTests(TestDataMixin, TestCase): + def test_session(self): + "The session isn't lost if a user logs in" + # The session doesn't exist to start. + response = self.client.get("/check_session/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"NO") + + # This request sets a session variable. 
+ response = self.client.get("/set_session/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"set_session") + + # The session has been modified + response = self.client.get("/check_session/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"YES") + + # Log in + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + + # Session should still contain the modified value + response = self.client.get("/check_session/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"YES") + + def test_session_initiated(self): + session = self.client.session + session["session_var"] = "foo" + session.save() + + response = self.client.get("/check_session/") + self.assertEqual(response.content, b"foo") + + def test_logout(self): + """Logout should work whether the user is logged in or not (#9978).""" + self.client.logout() + login = self.client.login(username="testclient", password="password") + self.assertTrue(login, "Could not log in") + self.client.logout() + self.client.logout() + + def test_logout_with_user(self): + """Logout should send user_logged_out signal if user was logged in.""" + + def listener(*args, **kwargs): + listener.executed = True + self.assertEqual(kwargs["sender"], User) + + listener.executed = False + + user_logged_out.connect(listener) + self.client.login(username="testclient", password="password") + self.client.logout() + user_logged_out.disconnect(listener) + self.assertTrue(listener.executed) + + @override_settings(AUTH_USER_MODEL="test_client_regress.CustomUser") + def test_logout_with_custom_user(self): + """Logout should send user_logged_out signal if custom user was logged in.""" + + def listener(*args, **kwargs): + self.assertEqual(kwargs["sender"], CustomUser) + listener.executed = True + + listener.executed = False + u = CustomUser.custom_objects.create(email="test@test.com") + 
u.set_password("password")
+        u.save()
+
+        user_logged_out.connect(listener)
+        self.client.login(username="test@test.com", password="password")
+        self.client.logout()
+        user_logged_out.disconnect(listener)
+        self.assertTrue(listener.executed)
+
+    @override_settings(
+        AUTHENTICATION_BACKENDS=(
+            "django.contrib.auth.backends.ModelBackend",
+            "test_client_regress.auth_backends.CustomUserBackend",
+        )
+    )
+    def test_logout_with_custom_auth_backend(self):
+        "Request a logout after logging in with custom authentication backend"
+
+        def listener(*args, **kwargs):
+            self.assertEqual(kwargs["sender"], CustomUser)
+            listener.executed = True
+
+        listener.executed = False
+        u = CustomUser.custom_objects.create(email="test@test.com")
+        u.set_password("password")
+        u.save()
+
+        user_logged_out.connect(listener)
+        self.client.login(username="test@test.com", password="password")
+        self.client.logout()
+        user_logged_out.disconnect(listener)
+        self.assertTrue(listener.executed)
+
+    def test_logout_without_user(self):
+        """Logout should send signal even if user not authenticated."""
+
+        def listener(user, *args, **kwargs):
+            listener.user = user
+            listener.executed = True
+
+        listener.executed = False
+
+        user_logged_out.connect(listener)
+        self.client.login(username="incorrect", password="password")
+        self.client.logout()
+        user_logged_out.disconnect(listener)
+
+        self.assertTrue(listener.executed)
+        self.assertIsNone(listener.user)
+
+    def test_login_with_user(self):
+        """Login should send user_logged_in signal on successful login."""
+
+        def listener(*args, **kwargs):
+            listener.executed = True
+
+        listener.executed = False
+
+        user_logged_in.connect(listener)
+        self.client.login(username="testclient", password="password")
+        # NOTE(review): was user_logged_out.disconnect(listener); the listener
+        # was connected to user_logged_in, so disconnect that same signal to
+        # avoid leaking the receiver into later tests.
+        user_logged_in.disconnect(listener)
+
+        self.assertTrue(listener.executed)
+
+    def test_login_without_signal(self):
+        """Login shouldn't send signal if user wasn't logged in"""
+
+        def listener(*args, **kwargs):
+            listener.executed = True
+
+
listener.executed = False + + user_logged_in.connect(listener) + self.client.login(username="incorrect", password="password") + user_logged_in.disconnect(listener) + + self.assertFalse(listener.executed) + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class RequestMethodTests(SimpleTestCase): + def test_get(self): + "Request a view via request method GET" + response = self.client.get("/request_methods/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: GET") + + def test_post(self): + "Request a view via request method POST" + response = self.client.post("/request_methods/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: POST") + + def test_head(self): + "Request a view via request method HEAD" + response = self.client.head("/request_methods/") + self.assertEqual(response.status_code, 200) + # A HEAD request doesn't return any content. + self.assertNotEqual(response.content, b"request method: HEAD") + self.assertEqual(response.content, b"") + + def test_options(self): + "Request a view via request method OPTIONS" + response = self.client.options("/request_methods/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: OPTIONS") + + def test_put(self): + "Request a view via request method PUT" + response = self.client.put("/request_methods/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: PUT") + + def test_delete(self): + "Request a view via request method DELETE" + response = self.client.delete("/request_methods/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: DELETE") + + def test_patch(self): + "Request a view via request method PATCH" + response = self.client.patch("/request_methods/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: 
PATCH") + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class RequestMethodStringDataTests(SimpleTestCase): + def test_post(self): + "Request a view with string data via request method POST" + # Regression test for #11371 + data = '{"test": "json"}' + response = self.client.post( + "/request_methods/", data=data, content_type="application/json" + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: POST") + + def test_put(self): + "Request a view with string data via request method PUT" + # Regression test for #11371 + data = '{"test": "json"}' + response = self.client.put( + "/request_methods/", data=data, content_type="application/json" + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: PUT") + + def test_patch(self): + "Request a view with string data via request method PATCH" + # Regression test for #17797 + data = '{"test": "json"}' + response = self.client.patch( + "/request_methods/", data=data, content_type="application/json" + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"request method: PATCH") + + def test_empty_string_data(self): + "Request a view with empty string data via request method GET/POST/HEAD" + # Regression test for #21740 + response = self.client.get("/body/", data="", content_type="application/json") + self.assertEqual(response.content, b"") + response = self.client.post("/body/", data="", content_type="application/json") + self.assertEqual(response.content, b"") + response = self.client.head("/body/", data="", content_type="application/json") + self.assertEqual(response.content, b"") + + def test_json_bytes(self): + response = self.client.post( + "/body/", data=b"{'value': 37}", content_type="application/json" + ) + self.assertEqual(response.content, b"{'value': 37}") + + def test_json(self): + response = self.client.get("/json_response/") + self.assertEqual(response.json(), 
{"key": "value"}) + + def test_json_charset(self): + response = self.client.get("/json_response_latin1/") + self.assertEqual(response.charset, "latin1") + self.assertEqual(response.json(), {"a": "Å"}) + + def test_json_structured_suffixes(self): + valid_types = ( + "application/vnd.api+json", + "application/vnd.api.foo+json", + "application/json; charset=utf-8", + "application/activity+json", + "application/activity+json; charset=utf-8", + ) + for content_type in valid_types: + response = self.client.get( + "/json_response/", {"content_type": content_type} + ) + self.assertEqual(response.headers["Content-Type"], content_type) + self.assertEqual(response.json(), {"key": "value"}) + + def test_json_multiple_access(self): + response = self.client.get("/json_response/") + self.assertIs(response.json(), response.json()) + + def test_json_wrong_header(self): + response = self.client.get("/body/") + msg = ( + 'Content-Type header is "text/html; charset=utf-8", not "application/json"' + ) + with self.assertRaisesMessage(ValueError, msg): + self.assertEqual(response.json(), {"key": "value"}) + + +@override_settings( + ROOT_URLCONF="test_client_regress.urls", +) +class QueryStringTests(SimpleTestCase): + def test_get_like_requests(self): + for method_name in ("get", "head"): + # A GET-like request can pass a query string as data (#10571) + method = getattr(self.client, method_name) + response = method("/request_data/", data={"foo": "whiz"}) + self.assertEqual(response.context["get-foo"], "whiz") + + # A GET-like request can pass a query string as part of the URL + response = method("/request_data/?foo=whiz") + self.assertEqual(response.context["get-foo"], "whiz") + + # Data provided in the URL to a GET-like request is overridden by + # actual form data. 
+ response = method("/request_data/?foo=whiz", data={"foo": "bang"}) + self.assertEqual(response.context["get-foo"], "bang") + + response = method("/request_data/?foo=whiz", data={"bar": "bang"}) + self.assertIsNone(response.context["get-foo"]) + self.assertEqual(response.context["get-bar"], "bang") + + def test_post_like_requests(self): + # A POST-like request can pass a query string as data + response = self.client.post("/request_data/", data={"foo": "whiz"}) + self.assertIsNone(response.context["get-foo"]) + self.assertEqual(response.context["post-foo"], "whiz") + + # A POST-like request can pass a query string as part of the URL + response = self.client.post("/request_data/?foo=whiz") + self.assertEqual(response.context["get-foo"], "whiz") + self.assertIsNone(response.context["post-foo"]) + + # POST data provided in the URL augments actual form data + response = self.client.post("/request_data/?foo=whiz", data={"foo": "bang"}) + self.assertEqual(response.context["get-foo"], "whiz") + self.assertEqual(response.context["post-foo"], "bang") + + response = self.client.post("/request_data/?foo=whiz", data={"bar": "bang"}) + self.assertEqual(response.context["get-foo"], "whiz") + self.assertIsNone(response.context["get-bar"]) + self.assertIsNone(response.context["post-foo"]) + self.assertEqual(response.context["post-bar"], "bang") + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class PayloadEncodingTests(SimpleTestCase): + """Regression tests for #10571.""" + + def test_simple_payload(self): + """A simple ASCII-only text can be POSTed.""" + text = "English: mountain pass" + response = self.client.post( + "/parse_encoded_text/", text, content_type="text/plain" + ) + self.assertEqual(response.content, text.encode()) + + def test_utf8_payload(self): + """Non-ASCII data encoded as UTF-8 can be POSTed.""" + text = "dog: собака" + response = self.client.post( + "/parse_encoded_text/", text, content_type="text/plain; charset=utf-8" + ) + 
self.assertEqual(response.content, text.encode()) + + def test_utf16_payload(self): + """Non-ASCII data encoded as UTF-16 can be POSTed.""" + text = "dog: собака" + response = self.client.post( + "/parse_encoded_text/", text, content_type="text/plain; charset=utf-16" + ) + self.assertEqual(response.content, text.encode("utf-16")) + + def test_non_utf_payload(self): + """Non-ASCII data as a non-UTF based encoding can be POSTed.""" + text = "dog: собака" + response = self.client.post( + "/parse_encoded_text/", text, content_type="text/plain; charset=koi8-r" + ) + self.assertEqual(response.content, text.encode("koi8-r")) + + +class DummyFile: + def __init__(self, filename): + self.name = filename + + def read(self): + return b"TEST_FILE_CONTENT" + + +class UploadedFileEncodingTest(SimpleTestCase): + def test_file_encoding(self): + encoded_file = encode_file( + "TEST_BOUNDARY", "TEST_KEY", DummyFile("test_name.bin") + ) + self.assertEqual(b"--TEST_BOUNDARY", encoded_file[0]) + self.assertEqual( + b'Content-Disposition: form-data; name="TEST_KEY"; ' + b'filename="test_name.bin"', + encoded_file[1], + ) + self.assertEqual(b"TEST_FILE_CONTENT", encoded_file[-1]) + + def test_guesses_content_type_on_file_encoding(self): + self.assertEqual( + b"Content-Type: application/octet-stream", + encode_file("IGNORE", "IGNORE", DummyFile("file.bin"))[2], + ) + self.assertEqual( + b"Content-Type: text/plain", + encode_file("IGNORE", "IGNORE", DummyFile("file.txt"))[2], + ) + self.assertIn( + encode_file("IGNORE", "IGNORE", DummyFile("file.zip"))[2], + ( + b"Content-Type: application/x-compress", + b"Content-Type: application/x-zip", + b"Content-Type: application/x-zip-compressed", + b"Content-Type: application/zip", + ), + ) + self.assertEqual( + b"Content-Type: application/octet-stream", + encode_file("IGNORE", "IGNORE", DummyFile("file.unknown"))[2], + ) + + +@override_settings( + ROOT_URLCONF="test_client_regress.urls", +) +class RequestHeadersTest(SimpleTestCase): + def 
test_client_headers(self): + "A test client can receive custom headers" + response = self.client.get( + "/check_headers/", headers={"x-arg-check": "Testing 123"} + ) + self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123") + self.assertEqual(response.status_code, 200) + + def test_client_headers_redirect(self): + "Test client headers are preserved through redirects" + response = self.client.get( + "/check_headers_redirect/", + follow=True, + headers={"x-arg-check": "Testing 123"}, + ) + self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123") + self.assertRedirects( + response, "/check_headers/", status_code=302, target_status_code=200 + ) + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class ReadLimitedStreamTest(SimpleTestCase): + """ + HttpRequest.body, HttpRequest.read(), and HttpRequest.read(BUFFER) have + proper LimitedStream behavior. + + Refs #14753, #15785 + """ + + def test_body_from_empty_request(self): + """HttpRequest.body on a test client GET request should return + the empty string.""" + self.assertEqual(self.client.get("/body/").content, b"") + + def test_read_from_empty_request(self): + """HttpRequest.read() on a test client GET request should return the + empty string.""" + self.assertEqual(self.client.get("/read_all/").content, b"") + + def test_read_numbytes_from_empty_request(self): + """HttpRequest.read(LARGE_BUFFER) on a test client GET request should + return the empty string.""" + self.assertEqual(self.client.get("/read_buffer/").content, b"") + + def test_read_from_nonempty_request(self): + """HttpRequest.read() on a test client PUT request with some payload + should return that payload.""" + payload = b"foobar" + self.assertEqual( + self.client.put( + "/read_all/", data=payload, content_type="text/plain" + ).content, + payload, + ) + + def test_read_numbytes_from_nonempty_request(self): + """HttpRequest.read(LARGE_BUFFER) on a test client PUT request with + some payload should return that 
payload.""" + payload = b"foobar" + self.assertEqual( + self.client.put( + "/read_buffer/", data=payload, content_type="text/plain" + ).content, + payload, + ) + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class RequestFactoryStateTest(SimpleTestCase): + """Regression tests for #15929.""" + + # These tests are checking that certain middleware don't change certain + # global state. Alternatively, from the point of view of a test, they are + # ensuring test isolation behavior. So, unusually, it doesn't make sense to + # run the tests individually, and if any are failing it is confusing to run + # them with any other set of tests. + + def common_test_that_should_always_pass(self): + request = RequestFactory().get("/") + request.session = {} + self.assertFalse(hasattr(request, "user")) + + def test_request(self): + self.common_test_that_should_always_pass() + + def test_request_after_client(self): + # apart from the next line the three tests are identical + self.client.get("/") + self.common_test_that_should_always_pass() + + def test_request_after_client_2(self): + # This test is executed after the previous one + self.common_test_that_should_always_pass() + + +@override_settings(ROOT_URLCONF="test_client_regress.urls") +class RequestFactoryEnvironmentTests(SimpleTestCase): + """ + Regression tests for #8551 and #17067: ensure that environment variables + are set correctly in RequestFactory. 
+ """ + + def test_should_set_correct_env_variables(self): + request = RequestFactory().get("/path/") + + self.assertEqual(request.META.get("REMOTE_ADDR"), "127.0.0.1") + self.assertEqual(request.META.get("SERVER_NAME"), "testserver") + self.assertEqual(request.META.get("SERVER_PORT"), "80") + self.assertEqual(request.META.get("SERVER_PROTOCOL"), "HTTP/1.1") + self.assertEqual( + request.META.get("SCRIPT_NAME") + request.META.get("PATH_INFO"), "/path/" + ) + + def test_cookies(self): + factory = RequestFactory() + factory.cookies.load('A="B"; C="D"; Path=/; Version=1') + request = factory.get("/") + self.assertEqual(request.META["HTTP_COOKIE"], 'A="B"; C="D"') diff --git a/testbed/django__django/tests/test_client_regress/urls.py b/testbed/django__django/tests/test_client_regress/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..07b61d7f41ad3e2e660f0e56953dc8ff28644f1a --- /dev/null +++ b/testbed/django__django/tests/test_client_regress/urls.py @@ -0,0 +1,66 @@ +from django.urls import include, path +from django.views.generic import RedirectView + +from . 
import views + +urlpatterns = [ + path("", include("test_client.urls")), + path("no_template_view/", views.no_template_view), + path("staff_only/", views.staff_only_view), + path("get_view/", views.get_view), + path("request_data/", views.request_data), + path( + "request_data_extended/", + views.request_data, + {"template": "extended.html", "data": "bacon"}, + ), + path("arg_view//", views.view_with_argument, name="arg_view"), + path("nested_view/", views.nested_view, name="nested_view"), + path("login_protected_redirect_view/", views.login_protected_redirect_view), + path("redirects/", RedirectView.as_view(url="/redirects/further/")), + path("redirects/further/", RedirectView.as_view(url="/redirects/further/more/")), + path("redirects/further/more/", RedirectView.as_view(url="/no_template_view/")), + path( + "redirect_to_non_existent_view/", + RedirectView.as_view(url="/non_existent_view/"), + ), + path( + "redirect_to_non_existent_view2/", + RedirectView.as_view(url="/redirect_to_non_existent_view/"), + ), + path("redirect_to_self/", RedirectView.as_view(url="/redirect_to_self/")), + path( + "redirect_to_self_with_changing_query_view/", + views.redirect_to_self_with_changing_query_view, + ), + path("circular_redirect_1/", RedirectView.as_view(url="/circular_redirect_2/")), + path("circular_redirect_2/", RedirectView.as_view(url="/circular_redirect_3/")), + path("circular_redirect_3/", RedirectView.as_view(url="/circular_redirect_1/")), + path( + "redirect_other_host/", + RedirectView.as_view(url="https://otherserver:8443/no_template_view/"), + ), + path( + "redirect_based_on_extra_headers_1/", + views.redirect_based_on_extra_headers_1_view, + ), + path( + "redirect_based_on_extra_headers_2/", + views.redirect_based_on_extra_headers_2_view, + ), + path("set_session/", views.set_session_view), + path("check_session/", views.check_session_view), + path("request_methods/", views.request_methods_view), + path("check_unicode/", views.return_unicode), + 
path("check_binary/", views.return_undecodable_binary), + path("json_response/", views.return_json_response), + path("json_response_latin1/", views.return_json_response_latin1), + path("parse_encoded_text/", views.return_text_file), + path("check_headers/", views.check_headers), + path("check_headers_redirect/", RedirectView.as_view(url="/check_headers/")), + path("body/", views.body), + path("read_all/", views.read_all), + path("read_buffer/", views.read_buffer), + path("request_context_view/", views.request_context_view), + path("render_template_multiple_times/", views.render_template_multiple_times), +] diff --git a/testbed/django__django/tests/test_client_regress/views.py b/testbed/django__django/tests/test_client_regress/views.py new file mode 100644 index 0000000000000000000000000000000000000000..91b8bdfefc4a44d39ce19849e76a211744d36267 --- /dev/null +++ b/testbed/django__django/tests/test_client_regress/views.py @@ -0,0 +1,182 @@ +from urllib.parse import urlencode + +from django.conf import settings +from django.contrib.auth.decorators import login_required +from django.http import HttpResponse, HttpResponseRedirect, JsonResponse +from django.shortcuts import render +from django.template.loader import render_to_string +from django.test import Client +from django.test.client import CONTENT_TYPE_RE + + +class CustomTestException(Exception): + pass + + +def no_template_view(request): + "A simple view that expects a GET request, and returns a rendered template" + return HttpResponse( + "No template used. Sample content: twice once twice. Content ends." + ) + + +def staff_only_view(request): + "A view that can only be visited by staff. 
Non staff members get an exception" + if request.user.is_staff: + return HttpResponse() + else: + raise CustomTestException() + + +@login_required +def get_view(request): + "A simple login protected view" + return HttpResponse("Hello world") + + +def request_data(request, template="base.html", data="sausage"): + "A simple view that returns the request data in the context" + return render( + request, + template, + { + "get-foo": request.GET.get("foo"), + "get-bar": request.GET.get("bar"), + "post-foo": request.POST.get("foo"), + "post-bar": request.POST.get("bar"), + "data": data, + }, + ) + + +def view_with_argument(request, name): + """A view that takes a string argument + + The purpose of this view is to check that if a space is provided in + the argument, the test framework unescapes the %20 before passing + the value to the view. + """ + if name == "Arthur Dent": + return HttpResponse("Hi, Arthur") + else: + return HttpResponse("Howdy, %s" % name) + + +def nested_view(request): + """ + A view that uses test client to call another view. 
+ """ + c = Client() + c.get("/no_template_view/") + return render(request, "base.html", {"nested": "yes"}) + + +@login_required +def login_protected_redirect_view(request): + "A view that redirects all requests to the GET view" + return HttpResponseRedirect("/get_view/") + + +def redirect_to_self_with_changing_query_view(request): + query = request.GET.copy() + query["counter"] += "0" + return HttpResponseRedirect( + "/redirect_to_self_with_changing_query_view/?%s" % urlencode(query) + ) + + +def set_session_view(request): + "A view that sets a session variable" + request.session["session_var"] = "YES" + return HttpResponse("set_session") + + +def check_session_view(request): + "A view that reads a session variable" + return HttpResponse(request.session.get("session_var", "NO")) + + +def request_methods_view(request): + "A view that responds with the request method" + return HttpResponse("request method: %s" % request.method) + + +def return_unicode(request): + return render(request, "unicode.html") + + +def return_undecodable_binary(request): + return HttpResponse( + b"%PDF-1.4\r\n%\x93\x8c\x8b\x9e ReportLab Generated PDF document " + b"http://www.reportlab.com" + ) + + +def return_json_response(request): + content_type = request.GET.get("content_type") + kwargs = {"content_type": content_type} if content_type else {} + return JsonResponse({"key": "value"}, **kwargs) + + +def return_json_response_latin1(request): + return HttpResponse( + b'{"a":"\xc5"}', content_type="application/json; charset=latin1" + ) + + +def return_text_file(request): + "A view that parses and returns text as a file." 
+ match = CONTENT_TYPE_RE.match(request.META["CONTENT_TYPE"]) + if match: + charset = match[1] + else: + charset = settings.DEFAULT_CHARSET + + return HttpResponse( + request.body, status=200, content_type="text/plain; charset=%s" % charset + ) + + +def check_headers(request): + "A view that responds with value of the X-ARG-CHECK header" + return HttpResponse( + "HTTP_X_ARG_CHECK: %s" % request.META.get("HTTP_X_ARG_CHECK", "Undefined") + ) + + +def body(request): + "A view that is requested with GET and accesses request.body. Refs #14753." + return HttpResponse(request.body) + + +def read_all(request): + "A view that is requested with accesses request.read()." + return HttpResponse(request.read()) + + +def read_buffer(request): + "A view that is requested with accesses request.read(LARGE_BUFFER)." + return HttpResponse(request.read(99999)) + + +def request_context_view(request): + # Special attribute that won't be present on a plain HttpRequest + request.special_path = request.path + return render(request, "request_context.html") + + +def render_template_multiple_times(request): + """A view that renders a template multiple times.""" + return HttpResponse(render_to_string("base.html") + render_to_string("base.html")) + + +def redirect_based_on_extra_headers_1_view(request): + if "HTTP_REDIRECT" in request.META: + return HttpResponseRedirect("/redirect_based_on_extra_headers_2/") + return HttpResponse() + + +def redirect_based_on_extra_headers_2_view(request): + if "HTTP_REDIRECT" in request.META: + return HttpResponseRedirect("/redirects/further/more/") + return HttpResponse() diff --git a/testbed/django__django/tests/test_exceptions/__init__.py b/testbed/django__django/tests/test_exceptions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_exceptions/test_validation_error.py b/testbed/django__django/tests/test_exceptions/test_validation_error.py 
new file mode 100644 index 0000000000000000000000000000000000000000..aaca5582afb9492209a99c5d7c952b7c092d288d --- /dev/null +++ b/testbed/django__django/tests/test_exceptions/test_validation_error.py @@ -0,0 +1,369 @@ +import unittest +from unittest import mock + +from django.core.exceptions import ValidationError + + +class TestValidationError(unittest.TestCase): + def test_messages_concatenates_error_dict_values(self): + message_dict = {} + exception = ValidationError(message_dict) + self.assertEqual(sorted(exception.messages), []) + message_dict["field1"] = ["E1", "E2"] + exception = ValidationError(message_dict) + self.assertEqual(sorted(exception.messages), ["E1", "E2"]) + message_dict["field2"] = ["E3", "E4"] + exception = ValidationError(message_dict) + self.assertEqual(sorted(exception.messages), ["E1", "E2", "E3", "E4"]) + + def test_eq(self): + error1 = ValidationError("message") + error2 = ValidationError("message", code="my_code1") + error3 = ValidationError("message", code="my_code2") + error4 = ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm1": "val1", "parm2": "val2"}, + ) + error5 = ValidationError({"field1": "message", "field2": "other"}) + error6 = ValidationError({"field1": "message"}) + error7 = ValidationError( + [ + ValidationError({"field1": "field error", "field2": "other"}), + "message", + ] + ) + + self.assertEqual(error1, ValidationError("message")) + self.assertNotEqual(error1, ValidationError("message2")) + self.assertNotEqual(error1, error2) + self.assertNotEqual(error1, error4) + self.assertNotEqual(error1, error5) + self.assertNotEqual(error1, error6) + self.assertNotEqual(error1, error7) + self.assertEqual(error1, mock.ANY) + self.assertEqual(error2, ValidationError("message", code="my_code1")) + self.assertNotEqual(error2, ValidationError("other", code="my_code1")) + self.assertNotEqual(error2, error3) + self.assertNotEqual(error2, error4) + self.assertNotEqual(error2, error5) + 
self.assertNotEqual(error2, error6) + self.assertNotEqual(error2, error7) + + self.assertEqual( + error4, + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm1": "val1", "parm2": "val2"}, + ), + ) + self.assertNotEqual( + error4, + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code2", + params={"parm1": "val1", "parm2": "val2"}, + ), + ) + self.assertNotEqual( + error4, + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm2": "val2"}, + ), + ) + self.assertNotEqual( + error4, + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm2": "val1", "parm1": "val2"}, + ), + ) + self.assertNotEqual( + error4, + ValidationError( + "error val1 val2", + code="my_code1", + ), + ) + # params ordering is ignored. + self.assertEqual( + error4, + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm2": "val2", "parm1": "val1"}, + ), + ) + + self.assertEqual( + error5, + ValidationError({"field1": "message", "field2": "other"}), + ) + self.assertNotEqual( + error5, + ValidationError({"field1": "message", "field2": "other2"}), + ) + self.assertNotEqual( + error5, + ValidationError({"field1": "message", "field3": "other"}), + ) + self.assertNotEqual(error5, error6) + # fields ordering is ignored. + self.assertEqual( + error5, + ValidationError({"field2": "other", "field1": "message"}), + ) + + self.assertNotEqual(error7, ValidationError(error7.error_list[1:])) + self.assertNotEqual( + ValidationError(["message"]), + ValidationError([ValidationError("message", code="my_code")]), + ) + # messages ordering is ignored. 
+ self.assertEqual( + error7, + ValidationError(list(reversed(error7.error_list))), + ) + + self.assertNotEqual(error4, ValidationError([error4])) + self.assertNotEqual(ValidationError([error4]), error4) + self.assertNotEqual(error4, ValidationError({"field1": error4})) + self.assertNotEqual(ValidationError({"field1": error4}), error4) + + def test_eq_nested(self): + error_dict = { + "field1": ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code", + params={"parm1": "val1", "parm2": "val2"}, + ), + "field2": "other", + } + error = ValidationError(error_dict) + self.assertEqual(error, ValidationError(dict(error_dict))) + self.assertEqual( + error, + ValidationError( + { + "field1": ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code", + params={"parm2": "val2", "parm1": "val1"}, + ), + "field2": "other", + } + ), + ) + self.assertNotEqual( + error, + ValidationError( + {**error_dict, "field2": "message"}, + ), + ) + self.assertNotEqual( + error, + ValidationError( + { + "field1": ValidationError( + "error %(parm1)s val2", + code="my_code", + params={"parm1": "val1"}, + ), + "field2": "other", + } + ), + ) + + def test_hash(self): + error1 = ValidationError("message") + error2 = ValidationError("message", code="my_code1") + error3 = ValidationError("message", code="my_code2") + error4 = ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm1": "val1", "parm2": "val2"}, + ) + error5 = ValidationError({"field1": "message", "field2": "other"}) + error6 = ValidationError({"field1": "message"}) + error7 = ValidationError( + [ + ValidationError({"field1": "field error", "field2": "other"}), + "message", + ] + ) + + self.assertEqual(hash(error1), hash(ValidationError("message"))) + self.assertNotEqual(hash(error1), hash(ValidationError("message2"))) + self.assertNotEqual(hash(error1), hash(error2)) + self.assertNotEqual(hash(error1), hash(error4)) + self.assertNotEqual(hash(error1), hash(error5)) + 
self.assertNotEqual(hash(error1), hash(error6)) + self.assertNotEqual(hash(error1), hash(error7)) + self.assertEqual( + hash(error2), + hash(ValidationError("message", code="my_code1")), + ) + self.assertNotEqual( + hash(error2), + hash(ValidationError("other", code="my_code1")), + ) + self.assertNotEqual(hash(error2), hash(error3)) + self.assertNotEqual(hash(error2), hash(error4)) + self.assertNotEqual(hash(error2), hash(error5)) + self.assertNotEqual(hash(error2), hash(error6)) + self.assertNotEqual(hash(error2), hash(error7)) + + self.assertEqual( + hash(error4), + hash( + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm1": "val1", "parm2": "val2"}, + ) + ), + ) + self.assertNotEqual( + hash(error4), + hash( + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code2", + params={"parm1": "val1", "parm2": "val2"}, + ) + ), + ) + self.assertNotEqual( + hash(error4), + hash( + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm2": "val2"}, + ) + ), + ) + self.assertNotEqual( + hash(error4), + hash( + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm2": "val1", "parm1": "val2"}, + ) + ), + ) + self.assertNotEqual( + hash(error4), + hash( + ValidationError( + "error val1 val2", + code="my_code1", + ) + ), + ) + # params ordering is ignored. + self.assertEqual( + hash(error4), + hash( + ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code1", + params={"parm2": "val2", "parm1": "val1"}, + ) + ), + ) + + self.assertEqual( + hash(error5), + hash(ValidationError({"field1": "message", "field2": "other"})), + ) + self.assertNotEqual( + hash(error5), + hash(ValidationError({"field1": "message", "field2": "other2"})), + ) + self.assertNotEqual( + hash(error5), + hash(ValidationError({"field1": "message", "field3": "other"})), + ) + self.assertNotEqual(error5, error6) + # fields ordering is ignored. 
+ self.assertEqual( + hash(error5), + hash(ValidationError({"field2": "other", "field1": "message"})), + ) + + self.assertNotEqual( + hash(error7), + hash(ValidationError(error7.error_list[1:])), + ) + self.assertNotEqual( + hash(ValidationError(["message"])), + hash(ValidationError([ValidationError("message", code="my_code")])), + ) + # messages ordering is ignored. + self.assertEqual( + hash(error7), + hash(ValidationError(list(reversed(error7.error_list)))), + ) + + self.assertNotEqual(hash(error4), hash(ValidationError([error4]))) + self.assertNotEqual(hash(ValidationError([error4])), hash(error4)) + self.assertNotEqual( + hash(error4), + hash(ValidationError({"field1": error4})), + ) + + def test_hash_nested(self): + error_dict = { + "field1": ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code", + params={"parm2": "val2", "parm1": "val1"}, + ), + "field2": "other", + } + error = ValidationError(error_dict) + self.assertEqual(hash(error), hash(ValidationError(dict(error_dict)))) + self.assertEqual( + hash(error), + hash( + ValidationError( + { + "field1": ValidationError( + "error %(parm1)s %(parm2)s", + code="my_code", + params={"parm1": "val1", "parm2": "val2"}, + ), + "field2": "other", + } + ) + ), + ) + self.assertNotEqual( + hash(error), + hash( + ValidationError( + {**error_dict, "field2": "message"}, + ) + ), + ) + self.assertNotEqual( + hash(error), + hash( + ValidationError( + { + "field1": ValidationError( + "error %(parm1)s val2", + code="my_code", + params={"parm1": "val1"}, + ), + "field2": "other", + } + ) + ), + ) diff --git a/testbed/django__django/tests/test_runner/__init__.py b/testbed/django__django/tests/test_runner/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner/models.py b/testbed/django__django/tests/test_runner/models.py new file mode 100644 index 
0000000000000000000000000000000000000000..80bf8dd8c7690bdf301a36b97e7752151b9fefb9 --- /dev/null +++ b/testbed/django__django/tests/test_runner/models.py @@ -0,0 +1,21 @@ +from django.db import models + + +class Person(models.Model): + first_name = models.CharField(max_length=20) + last_name = models.CharField(max_length=20) + friends = models.ManyToManyField("self") + + +# A set of models that use a non-abstract inherited 'through' model. +class ThroughBase(models.Model): + person = models.ForeignKey(Person, models.CASCADE) + b = models.ForeignKey("B", models.CASCADE) + + +class Through(ThroughBase): + extra = models.CharField(max_length=20) + + +class B(models.Model): + people = models.ManyToManyField(Person, through=Through) diff --git a/testbed/django__django/tests/test_runner/runner.py b/testbed/django__django/tests/test_runner/runner.py new file mode 100644 index 0000000000000000000000000000000000000000..5c34b343d9113776f63a29d979cfc6f9e54daba1 --- /dev/null +++ b/testbed/django__django/tests/test_runner/runner.py @@ -0,0 +1,29 @@ +from django.test.runner import DiscoverRunner + + +class CustomOptionsTestRunner(DiscoverRunner): + def __init__( + self, + verbosity=1, + interactive=True, + failfast=True, + option_a=None, + option_b=None, + option_c=None, + **kwargs, + ): + super().__init__( + verbosity=verbosity, interactive=interactive, failfast=failfast + ) + self.option_a = option_a + self.option_b = option_b + self.option_c = option_c + + @classmethod + def add_arguments(cls, parser): + parser.add_argument("--option_a", "-a", default="1"), + parser.add_argument("--option_b", "-b", default="2"), + parser.add_argument("--option_c", "-c", default="3"), + + def run_tests(self, test_labels, **kwargs): + print("%s:%s:%s" % (self.option_a, self.option_b, self.option_c)) diff --git a/testbed/django__django/tests/test_runner/test_debug_sql.py b/testbed/django__django/tests/test_runner/test_debug_sql.py new file mode 100644 index 
0000000000000000000000000000000000000000..ef517fac2910daa8293c09e29ed11c327b02782b --- /dev/null +++ b/testbed/django__django/tests/test_runner/test_debug_sql.py @@ -0,0 +1,153 @@ +import unittest +from io import StringIO + +from django.db import connection +from django.test import TestCase +from django.test.runner import DiscoverRunner +from django.utils.version import PY311 + +from .models import Person + + +@unittest.skipUnless( + connection.vendor == "sqlite", "Only run on sqlite so we can check output SQL." +) +class TestDebugSQL(unittest.TestCase): + class PassingTest(TestCase): + def runTest(self): + Person.objects.filter(first_name="pass").count() + + class FailingTest(TestCase): + def runTest(self): + Person.objects.filter(first_name="fail").count() + self.fail() + + class ErrorTest(TestCase): + def runTest(self): + Person.objects.filter(first_name="error").count() + raise Exception + + class ErrorSetUpTestDataTest(TestCase): + @classmethod + def setUpTestData(cls): + raise Exception + + def runTest(self): + pass + + class PassingSubTest(TestCase): + def runTest(self): + with self.subTest(): + Person.objects.filter(first_name="subtest-pass").count() + + class FailingSubTest(TestCase): + def runTest(self): + with self.subTest(): + Person.objects.filter(first_name="subtest-fail").count() + self.fail() + + class ErrorSubTest(TestCase): + def runTest(self): + with self.subTest(): + Person.objects.filter(first_name="subtest-error").count() + raise Exception + + def _test_output(self, verbosity): + runner = DiscoverRunner(debug_sql=True, verbosity=0) + suite = runner.test_suite() + suite.addTest(self.FailingTest()) + suite.addTest(self.ErrorTest()) + suite.addTest(self.PassingTest()) + suite.addTest(self.PassingSubTest()) + suite.addTest(self.FailingSubTest()) + suite.addTest(self.ErrorSubTest()) + old_config = runner.setup_databases() + stream = StringIO() + resultclass = runner.get_resultclass() + runner.test_runner( + verbosity=verbosity, + stream=stream, + 
resultclass=resultclass, + ).run(suite) + runner.teardown_databases(old_config) + + return stream.getvalue() + + def test_output_normal(self): + full_output = self._test_output(1) + for output in self.expected_outputs: + self.assertIn(output, full_output) + for output in self.verbose_expected_outputs: + self.assertNotIn(output, full_output) + + def test_output_verbose(self): + full_output = self._test_output(2) + for output in self.expected_outputs: + self.assertIn(output, full_output) + for output in self.verbose_expected_outputs: + self.assertIn(output, full_output) + + expected_outputs = [ + ( + """SELECT COUNT(*) AS "__count"\n""" + """FROM "test_runner_person"\n""" + """WHERE "test_runner_person"."first_name" = 'error';""" + ), + ( + """SELECT COUNT(*) AS "__count"\n""" + """FROM "test_runner_person"\n""" + """WHERE "test_runner_person"."first_name" = 'fail';""" + ), + ( + """SELECT COUNT(*) AS "__count"\n""" + """FROM "test_runner_person"\n""" + """WHERE "test_runner_person"."first_name" = 'subtest-error';""" + ), + ( + """SELECT COUNT(*) AS "__count"\n""" + """FROM "test_runner_person"\n""" + """WHERE "test_runner_person"."first_name" = 'subtest-fail';""" + ), + ] + + # Python 3.11 uses fully qualified test name in the output. + method_name = ".runTest" if PY311 else "" + test_class_path = "test_runner.test_debug_sql.TestDebugSQL" + verbose_expected_outputs = [ + f"runTest ({test_class_path}.FailingTest{method_name}) ... FAIL", + f"runTest ({test_class_path}.ErrorTest{method_name}) ... ERROR", + f"runTest ({test_class_path}.PassingTest{method_name}) ... ok", + # If there are errors/failures in subtests but not in test itself, + # the status is not written. That behavior comes from Python. 
+ f"runTest ({test_class_path}.FailingSubTest{method_name}) ...", + f"runTest ({test_class_path}.ErrorSubTest{method_name}) ...", + ( + """SELECT COUNT(*) AS "__count" """ + """FROM "test_runner_person" WHERE """ + """"test_runner_person"."first_name" = 'pass';""" + ), + ( + """SELECT COUNT(*) AS "__count" """ + """FROM "test_runner_person" WHERE """ + """"test_runner_person"."first_name" = 'subtest-pass';""" + ), + ] + + def test_setupclass_exception(self): + runner = DiscoverRunner(debug_sql=True, verbosity=0) + suite = runner.test_suite() + suite.addTest(self.ErrorSetUpTestDataTest()) + old_config = runner.setup_databases() + stream = StringIO() + runner.test_runner( + verbosity=0, + stream=stream, + resultclass=runner.get_resultclass(), + ).run(suite) + runner.teardown_databases(old_config) + output = stream.getvalue() + self.assertIn( + "ERROR: setUpClass " + "(test_runner.test_debug_sql.TestDebugSQL.ErrorSetUpTestDataTest)", + output, + ) diff --git a/testbed/django__django/tests/test_runner/test_discover_runner.py b/testbed/django__django/tests/test_runner/test_discover_runner.py new file mode 100644 index 0000000000000000000000000000000000000000..bca90374923c61861db731fed86abedcdbb45443 --- /dev/null +++ b/testbed/django__django/tests/test_runner/test_discover_runner.py @@ -0,0 +1,831 @@ +import logging +import multiprocessing +import os +import unittest.loader +from argparse import ArgumentParser +from contextlib import contextmanager +from importlib import import_module +from unittest import TestSuite, TextTestRunner, defaultTestLoader, mock + +from django.db import connections +from django.test import SimpleTestCase +from django.test.runner import DiscoverRunner, get_max_test_processes +from django.test.utils import ( + NullTimeKeeper, + TimeKeeper, + captured_stderr, + captured_stdout, +) + + +@contextmanager +def change_cwd(directory): + current_dir = os.path.abspath(os.path.dirname(__file__)) + new_dir = os.path.join(current_dir, directory) + old_cwd 
= os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(old_cwd) + + +@contextmanager +def change_loader_patterns(patterns): + original_patterns = DiscoverRunner.test_loader.testNamePatterns + DiscoverRunner.test_loader.testNamePatterns = patterns + try: + yield + finally: + DiscoverRunner.test_loader.testNamePatterns = original_patterns + + +# Isolate from the real environment. +@mock.patch.dict(os.environ, {}, clear=True) +@mock.patch.object(multiprocessing, "cpu_count", return_value=12) +# Python 3.8 on macOS defaults to 'spawn' mode. +@mock.patch.object(multiprocessing, "get_start_method", return_value="fork") +class DiscoverRunnerParallelArgumentTests(SimpleTestCase): + def get_parser(self): + parser = ArgumentParser() + DiscoverRunner.add_arguments(parser) + return parser + + def test_parallel_default(self, *mocked_objects): + result = self.get_parser().parse_args([]) + self.assertEqual(result.parallel, 0) + + def test_parallel_flag(self, *mocked_objects): + result = self.get_parser().parse_args(["--parallel"]) + self.assertEqual(result.parallel, "auto") + + def test_parallel_auto(self, *mocked_objects): + result = self.get_parser().parse_args(["--parallel", "auto"]) + self.assertEqual(result.parallel, "auto") + + def test_parallel_count(self, *mocked_objects): + result = self.get_parser().parse_args(["--parallel", "17"]) + self.assertEqual(result.parallel, 17) + + def test_parallel_invalid(self, *mocked_objects): + with self.assertRaises(SystemExit), captured_stderr() as stderr: + self.get_parser().parse_args(["--parallel", "unaccepted"]) + msg = "argument --parallel: 'unaccepted' is not an integer or the string 'auto'" + self.assertIn(msg, stderr.getvalue()) + + def test_get_max_test_processes(self, *mocked_objects): + self.assertEqual(get_max_test_processes(), 12) + + @mock.patch.dict(os.environ, {"DJANGO_TEST_PROCESSES": "7"}) + def test_get_max_test_processes_env_var(self, *mocked_objects): + self.assertEqual(get_max_test_processes(), 7) + 
+ def test_get_max_test_processes_spawn( + self, + mocked_get_start_method, + mocked_cpu_count, + ): + mocked_get_start_method.return_value = "spawn" + self.assertEqual(get_max_test_processes(), 12) + with mock.patch.dict(os.environ, {"DJANGO_TEST_PROCESSES": "7"}): + self.assertEqual(get_max_test_processes(), 7) + + def test_get_max_test_processes_forkserver( + self, + mocked_get_start_method, + mocked_cpu_count, + ): + mocked_get_start_method.return_value = "forkserver" + self.assertEqual(get_max_test_processes(), 1) + with mock.patch.dict(os.environ, {"DJANGO_TEST_PROCESSES": "7"}): + self.assertEqual(get_max_test_processes(), 1) + + +class DiscoverRunnerTests(SimpleTestCase): + @staticmethod + def get_test_methods_names(suite): + return [t.__class__.__name__ + "." + t._testMethodName for t in suite._tests] + + def test_init_debug_mode(self): + runner = DiscoverRunner() + self.assertFalse(runner.debug_mode) + + def test_add_arguments_shuffle(self): + parser = ArgumentParser() + DiscoverRunner.add_arguments(parser) + ns = parser.parse_args([]) + self.assertIs(ns.shuffle, False) + ns = parser.parse_args(["--shuffle"]) + self.assertIsNone(ns.shuffle) + ns = parser.parse_args(["--shuffle", "5"]) + self.assertEqual(ns.shuffle, 5) + + def test_add_arguments_debug_mode(self): + parser = ArgumentParser() + DiscoverRunner.add_arguments(parser) + + ns = parser.parse_args([]) + self.assertFalse(ns.debug_mode) + ns = parser.parse_args(["--debug-mode"]) + self.assertTrue(ns.debug_mode) + + def test_setup_shuffler_no_shuffle_argument(self): + runner = DiscoverRunner() + self.assertIs(runner.shuffle, False) + runner.setup_shuffler() + self.assertIsNone(runner.shuffle_seed) + + def test_setup_shuffler_shuffle_none(self): + runner = DiscoverRunner(shuffle=None) + self.assertIsNone(runner.shuffle) + with mock.patch("random.randint", return_value=1): + with captured_stdout() as stdout: + runner.setup_shuffler() + self.assertEqual(stdout.getvalue(), "Using shuffle seed: 1 
(generated)\n") + self.assertEqual(runner.shuffle_seed, 1) + + def test_setup_shuffler_shuffle_int(self): + runner = DiscoverRunner(shuffle=2) + self.assertEqual(runner.shuffle, 2) + with captured_stdout() as stdout: + runner.setup_shuffler() + expected_out = "Using shuffle seed: 2 (given)\n" + self.assertEqual(stdout.getvalue(), expected_out) + self.assertEqual(runner.shuffle_seed, 2) + + def test_load_tests_for_label_file_path(self): + with change_cwd("."): + msg = ( + "One of the test labels is a path to a file: " + "'test_discover_runner.py', which is not supported. Use a " + "dotted module name or path to a directory instead." + ) + with self.assertRaisesMessage(RuntimeError, msg): + DiscoverRunner().load_tests_for_label("test_discover_runner.py", {}) + + def test_dotted_test_module(self): + count = ( + DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps.sample.tests_sample"], + ) + .countTestCases() + ) + + self.assertEqual(count, 4) + + def test_dotted_test_class_vanilla_unittest(self): + count = ( + DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps.sample.tests_sample.TestVanillaUnittest"], + ) + .countTestCases() + ) + + self.assertEqual(count, 1) + + def test_dotted_test_class_django_testcase(self): + count = ( + DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps.sample.tests_sample.TestDjangoTestCase"], + ) + .countTestCases() + ) + + self.assertEqual(count, 1) + + def test_dotted_test_method_django_testcase(self): + count = ( + DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps.sample.tests_sample.TestDjangoTestCase.test_sample"], + ) + .countTestCases() + ) + + self.assertEqual(count, 1) + + def test_pattern(self): + count = ( + DiscoverRunner( + pattern="*_tests.py", + verbosity=0, + ) + .build_suite(["test_runner_apps.sample"]) + .countTestCases() + ) + + self.assertEqual(count, 1) + + def test_name_patterns(self): + all_test_1 = [ + "DjangoCase1.test_1", + "DjangoCase2.test_1", + 
"SimpleCase1.test_1", + "SimpleCase2.test_1", + "UnittestCase1.test_1", + "UnittestCase2.test_1", + ] + all_test_2 = [ + "DjangoCase1.test_2", + "DjangoCase2.test_2", + "SimpleCase1.test_2", + "SimpleCase2.test_2", + "UnittestCase1.test_2", + "UnittestCase2.test_2", + ] + all_tests = sorted([*all_test_1, *all_test_2, "UnittestCase2.test_3_test"]) + for pattern, expected in [ + [["test_1"], all_test_1], + [["UnittestCase1"], ["UnittestCase1.test_1", "UnittestCase1.test_2"]], + [["*test"], ["UnittestCase2.test_3_test"]], + [["test*"], all_tests], + [["test"], all_tests], + [["test_1", "test_2"], sorted([*all_test_1, *all_test_2])], + [["test*1"], all_test_1], + ]: + with self.subTest(pattern): + suite = DiscoverRunner( + test_name_patterns=pattern, + verbosity=0, + ).build_suite(["test_runner_apps.simple"]) + self.assertEqual(expected, self.get_test_methods_names(suite)) + + def test_loader_patterns_not_mutated(self): + runner = DiscoverRunner(test_name_patterns=["test_sample"], verbosity=0) + tests = [ + ("test_runner_apps.sample.tests", 1), + ("test_runner_apps.sample.tests.Test.test_sample", 1), + ("test_runner_apps.sample.empty", 0), + ("test_runner_apps.sample.tests_sample.EmptyTestCase", 0), + ] + for test_labels, tests_count in tests: + with self.subTest(test_labels=test_labels): + with change_loader_patterns(["UnittestCase1"]): + count = runner.build_suite([test_labels]).countTestCases() + self.assertEqual(count, tests_count) + self.assertEqual( + runner.test_loader.testNamePatterns, ["UnittestCase1"] + ) + + def test_loader_patterns_not_mutated_when_test_label_is_file_path(self): + runner = DiscoverRunner(test_name_patterns=["test_sample"], verbosity=0) + with change_cwd("."), change_loader_patterns(["UnittestCase1"]): + with self.assertRaises(RuntimeError): + runner.build_suite(["test_discover_runner.py"]) + self.assertEqual(runner.test_loader.testNamePatterns, ["UnittestCase1"]) + + def test_file_path(self): + with change_cwd(".."): + count = ( + 
DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps/sample/"], + ) + .countTestCases() + ) + + self.assertEqual(count, 5) + + def test_empty_label(self): + """ + If the test label is empty, discovery should happen on the current + working directory. + """ + with change_cwd("."): + suite = DiscoverRunner(verbosity=0).build_suite([]) + self.assertEqual( + suite._tests[0].id().split(".")[0], + os.path.basename(os.getcwd()), + ) + + def test_empty_test_case(self): + count = ( + DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps.sample.tests_sample.EmptyTestCase"], + ) + .countTestCases() + ) + + self.assertEqual(count, 0) + + def test_discovery_on_package(self): + count = ( + DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps.sample.tests"], + ) + .countTestCases() + ) + + self.assertEqual(count, 1) + + def test_ignore_adjacent(self): + """ + When given a dotted path to a module, unittest discovery searches + not just the module, but also the directory containing the module. + + This results in tests from adjacent modules being run when they + should not. The discover runner avoids this behavior. 
+ """ + count = ( + DiscoverRunner(verbosity=0) + .build_suite( + ["test_runner_apps.sample.empty"], + ) + .countTestCases() + ) + + self.assertEqual(count, 0) + + def test_testcase_ordering(self): + with change_cwd(".."): + suite = DiscoverRunner(verbosity=0).build_suite( + ["test_runner_apps/sample/"] + ) + self.assertEqual( + suite._tests[0].__class__.__name__, + "TestDjangoTestCase", + msg="TestDjangoTestCase should be the first test case", + ) + self.assertEqual( + suite._tests[1].__class__.__name__, + "TestZimpleTestCase", + msg="TestZimpleTestCase should be the second test case", + ) + # All others can follow in unspecified order, including doctests + self.assertIn( + "DocTestCase", [t.__class__.__name__ for t in suite._tests[2:]] + ) + + def test_duplicates_ignored(self): + """ + Tests shouldn't be discovered twice when discovering on overlapping paths. + """ + base_app = "forms_tests" + sub_app = "forms_tests.field_tests" + runner = DiscoverRunner(verbosity=0) + with self.modify_settings(INSTALLED_APPS={"append": sub_app}): + single = runner.build_suite([base_app]).countTestCases() + dups = runner.build_suite([base_app, sub_app]).countTestCases() + self.assertEqual(single, dups) + + def test_reverse(self): + """ + Reverse should reorder tests while maintaining the grouping specified + by ``DiscoverRunner.reorder_by``. + """ + runner = DiscoverRunner(reverse=True, verbosity=0) + suite = runner.build_suite( + test_labels=("test_runner_apps.sample", "test_runner_apps.simple") + ) + self.assertIn( + "test_runner_apps.simple", + next(iter(suite)).id(), + msg="Test labels should be reversed.", + ) + suite = runner.build_suite(test_labels=("test_runner_apps.simple",)) + suite = tuple(suite) + self.assertIn( + "DjangoCase", suite[0].id(), msg="Test groups should not be reversed." + ) + self.assertIn( + "SimpleCase", suite[4].id(), msg="Test groups order should be preserved." 
+ ) + self.assertIn( + "DjangoCase2", suite[0].id(), msg="Django test cases should be reversed." + ) + self.assertIn( + "SimpleCase2", suite[4].id(), msg="Simple test cases should be reversed." + ) + self.assertIn( + "UnittestCase2", + suite[8].id(), + msg="Unittest test cases should be reversed.", + ) + self.assertIn( + "test_2", suite[0].id(), msg="Methods of Django cases should be reversed." + ) + self.assertIn( + "test_2", suite[4].id(), msg="Methods of simple cases should be reversed." + ) + self.assertIn( + "test_2", suite[9].id(), msg="Methods of unittest cases should be reversed." + ) + + def test_build_suite_failed_tests_first(self): + # The "doesnotexist" label results in a _FailedTest instance. + suite = DiscoverRunner(verbosity=0).build_suite( + test_labels=["test_runner_apps.sample", "doesnotexist"], + ) + tests = list(suite) + self.assertIsInstance(tests[0], unittest.loader._FailedTest) + self.assertNotIsInstance(tests[-1], unittest.loader._FailedTest) + + def test_build_suite_shuffling(self): + # These will result in unittest.loader._FailedTest instances rather + # than TestCase objects, but they are sufficient for testing. + labels = ["label1", "label2", "label3", "label4"] + cases = [ + ({}, ["label1", "label2", "label3", "label4"]), + ({"reverse": True}, ["label4", "label3", "label2", "label1"]), + ({"shuffle": 8}, ["label4", "label1", "label3", "label2"]), + ({"shuffle": 8, "reverse": True}, ["label2", "label3", "label1", "label4"]), + ] + for kwargs, expected in cases: + with self.subTest(kwargs=kwargs): + # Prevent writing the seed to stdout. + runner = DiscoverRunner(**kwargs, verbosity=0) + tests = runner.build_suite(test_labels=labels) + # The ids have the form "unittest.loader._FailedTest.label1". 
+ names = [test.id().split(".")[-1] for test in tests] + self.assertEqual(names, expected) + + def test_overridable_get_test_runner_kwargs(self): + self.assertIsInstance(DiscoverRunner().get_test_runner_kwargs(), dict) + + def test_overridable_test_suite(self): + self.assertEqual(DiscoverRunner().test_suite, TestSuite) + + def test_overridable_test_runner(self): + self.assertEqual(DiscoverRunner().test_runner, TextTestRunner) + + def test_overridable_test_loader(self): + self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader) + + def test_tags(self): + runner = DiscoverRunner(tags=["core"], verbosity=0) + self.assertEqual( + runner.build_suite(["test_runner_apps.tagged.tests"]).countTestCases(), 1 + ) + runner = DiscoverRunner(tags=["fast"], verbosity=0) + self.assertEqual( + runner.build_suite(["test_runner_apps.tagged.tests"]).countTestCases(), 2 + ) + runner = DiscoverRunner(tags=["slow"], verbosity=0) + self.assertEqual( + runner.build_suite(["test_runner_apps.tagged.tests"]).countTestCases(), 2 + ) + + def test_exclude_tags(self): + runner = DiscoverRunner(tags=["fast"], exclude_tags=["core"], verbosity=0) + self.assertEqual( + runner.build_suite(["test_runner_apps.tagged.tests"]).countTestCases(), 1 + ) + runner = DiscoverRunner(tags=["fast"], exclude_tags=["slow"], verbosity=0) + self.assertEqual( + runner.build_suite(["test_runner_apps.tagged.tests"]).countTestCases(), 0 + ) + runner = DiscoverRunner(exclude_tags=["slow"], verbosity=0) + self.assertEqual( + runner.build_suite(["test_runner_apps.tagged.tests"]).countTestCases(), 0 + ) + + def test_tag_inheritance(self): + def count_tests(**kwargs): + kwargs.setdefault("verbosity", 0) + suite = DiscoverRunner(**kwargs).build_suite( + ["test_runner_apps.tagged.tests_inheritance"] + ) + return suite.countTestCases() + + self.assertEqual(count_tests(tags=["foo"]), 4) + self.assertEqual(count_tests(tags=["bar"]), 2) + self.assertEqual(count_tests(tags=["baz"]), 2) + 
self.assertEqual(count_tests(tags=["foo"], exclude_tags=["bar"]), 2) + self.assertEqual(count_tests(tags=["foo"], exclude_tags=["bar", "baz"]), 1) + self.assertEqual(count_tests(exclude_tags=["foo"]), 0) + + def test_tag_fail_to_load(self): + with self.assertRaises(SyntaxError): + import_module("test_runner_apps.tagged.tests_syntax_error") + runner = DiscoverRunner(tags=["syntax_error"], verbosity=0) + # A label that doesn't exist or cannot be loaded due to syntax errors + # is always considered matching. + suite = runner.build_suite(["doesnotexist", "test_runner_apps.tagged"]) + self.assertEqual( + [test.id() for test in suite], + [ + "unittest.loader._FailedTest.doesnotexist", + "unittest.loader._FailedTest.test_runner_apps.tagged." + "tests_syntax_error", + ], + ) + + def test_included_tags_displayed(self): + runner = DiscoverRunner(tags=["foo", "bar"], verbosity=2) + with captured_stdout() as stdout: + runner.build_suite(["test_runner_apps.tagged.tests"]) + self.assertIn("Including test tag(s): bar, foo.\n", stdout.getvalue()) + + def test_excluded_tags_displayed(self): + runner = DiscoverRunner(exclude_tags=["foo", "bar"], verbosity=3) + with captured_stdout() as stdout: + runner.build_suite(["test_runner_apps.tagged.tests"]) + self.assertIn("Excluding test tag(s): bar, foo.\n", stdout.getvalue()) + + def test_number_of_tests_found_displayed(self): + runner = DiscoverRunner() + with captured_stdout() as stdout: + runner.build_suite( + [ + "test_runner_apps.sample.tests_sample.TestDjangoTestCase", + "test_runner_apps.simple", + ] + ) + self.assertIn("Found 14 test(s).\n", stdout.getvalue()) + + def test_pdb_with_parallel(self): + msg = "You cannot use --pdb with parallel tests; pass --parallel=1 to use it." 
+ with self.assertRaisesMessage(ValueError, msg): + DiscoverRunner(pdb=True, parallel=2) + + def test_number_of_parallel_workers(self): + """Number of processes doesn't exceed the number of TestCases.""" + runner = DiscoverRunner(parallel=5, verbosity=0) + suite = runner.build_suite(["test_runner_apps.tagged"]) + self.assertEqual(suite.processes, len(suite.subsuites)) + + def test_number_of_databases_parallel_test_suite(self): + """ + Number of databases doesn't exceed the number of TestCases with + parallel tests. + """ + runner = DiscoverRunner(parallel=8, verbosity=0) + suite = runner.build_suite(["test_runner_apps.tagged"]) + self.assertEqual(suite.processes, len(suite.subsuites)) + self.assertEqual(runner.parallel, suite.processes) + + def test_number_of_databases_no_parallel_test_suite(self): + """ + Number of databases doesn't exceed the number of TestCases with + non-parallel tests. + """ + runner = DiscoverRunner(parallel=8, verbosity=0) + suite = runner.build_suite(["test_runner_apps.simple.tests.DjangoCase1"]) + self.assertEqual(runner.parallel, 1) + self.assertIsInstance(suite, TestSuite) + + def test_buffer_mode_test_pass(self): + runner = DiscoverRunner(buffer=True, verbosity=0) + with captured_stdout() as stdout, captured_stderr() as stderr: + suite = runner.build_suite( + [ + "test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase." + "test_pass", + ] + ) + runner.run_suite(suite) + self.assertNotIn("Write to stderr.", stderr.getvalue()) + self.assertNotIn("Write to stdout.", stdout.getvalue()) + + def test_buffer_mode_test_fail(self): + runner = DiscoverRunner(buffer=True, verbosity=0) + with captured_stdout() as stdout, captured_stderr() as stderr: + suite = runner.build_suite( + [ + "test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase." 
+ "test_fail", + ] + ) + runner.run_suite(suite) + self.assertIn("Write to stderr.", stderr.getvalue()) + self.assertIn("Write to stdout.", stdout.getvalue()) + + def run_suite_with_runner(self, runner_class, **kwargs): + class MyRunner(DiscoverRunner): + def test_runner(self, *args, **kwargs): + return runner_class() + + runner = MyRunner(**kwargs) + # Suppress logging "Using shuffle seed" to the console. + with captured_stdout(): + runner.setup_shuffler() + with captured_stdout() as stdout: + try: + result = runner.run_suite(None) + except RuntimeError as exc: + result = str(exc) + output = stdout.getvalue() + return result, output + + def test_run_suite_logs_seed(self): + class TestRunner: + def run(self, suite): + return "" + + expected_prefix = "Used shuffle seed" + # Test with and without shuffling enabled. + result, output = self.run_suite_with_runner(TestRunner) + self.assertEqual(result, "") + self.assertNotIn(expected_prefix, output) + + result, output = self.run_suite_with_runner(TestRunner, shuffle=2) + self.assertEqual(result, "") + expected_output = f"{expected_prefix}: 2 (given)\n" + self.assertEqual(output, expected_output) + + def test_run_suite_logs_seed_exception(self): + """ + run_suite() logs the seed when TestRunner.run() raises an exception. 
+ """ + + class TestRunner: + def run(self, suite): + raise RuntimeError("my exception") + + result, output = self.run_suite_with_runner(TestRunner, shuffle=2) + self.assertEqual(result, "my exception") + expected_output = "Used shuffle seed: 2 (given)\n" + self.assertEqual(output, expected_output) + + @mock.patch("faulthandler.enable") + def test_faulthandler_enabled(self, mocked_enable): + with mock.patch("faulthandler.is_enabled", return_value=False): + DiscoverRunner(enable_faulthandler=True) + mocked_enable.assert_called() + + @mock.patch("faulthandler.enable") + def test_faulthandler_already_enabled(self, mocked_enable): + with mock.patch("faulthandler.is_enabled", return_value=True): + DiscoverRunner(enable_faulthandler=True) + mocked_enable.assert_not_called() + + @mock.patch("faulthandler.enable") + def test_faulthandler_enabled_fileno(self, mocked_enable): + # sys.stderr that is not an actual file. + with mock.patch( + "faulthandler.is_enabled", return_value=False + ), captured_stderr(): + DiscoverRunner(enable_faulthandler=True) + mocked_enable.assert_called() + + @mock.patch("faulthandler.enable") + def test_faulthandler_disabled(self, mocked_enable): + with mock.patch("faulthandler.is_enabled", return_value=False): + DiscoverRunner(enable_faulthandler=False) + mocked_enable.assert_not_called() + + def test_timings_not_captured(self): + runner = DiscoverRunner(timing=False) + with captured_stderr() as stderr: + with runner.time_keeper.timed("test"): + pass + runner.time_keeper.print_results() + self.assertIsInstance(runner.time_keeper, NullTimeKeeper) + self.assertNotIn("test", stderr.getvalue()) + + def test_timings_captured(self): + runner = DiscoverRunner(timing=True) + with captured_stderr() as stderr: + with runner.time_keeper.timed("test"): + pass + runner.time_keeper.print_results() + self.assertIsInstance(runner.time_keeper, TimeKeeper) + self.assertIn("test", stderr.getvalue()) + + def test_log(self): + custom_low_level = 5 + custom_high_level 
= 45 + msg = "logging message" + cases = [ + (0, None, False), + (0, custom_low_level, False), + (0, logging.DEBUG, False), + (0, logging.INFO, False), + (0, logging.WARNING, False), + (0, custom_high_level, False), + (1, None, True), + (1, custom_low_level, False), + (1, logging.DEBUG, False), + (1, logging.INFO, True), + (1, logging.WARNING, True), + (1, custom_high_level, True), + (2, None, True), + (2, custom_low_level, True), + (2, logging.DEBUG, True), + (2, logging.INFO, True), + (2, logging.WARNING, True), + (2, custom_high_level, True), + (3, None, True), + (3, custom_low_level, True), + (3, logging.DEBUG, True), + (3, logging.INFO, True), + (3, logging.WARNING, True), + (3, custom_high_level, True), + ] + for verbosity, level, output in cases: + with self.subTest(verbosity=verbosity, level=level): + with captured_stdout() as stdout: + runner = DiscoverRunner(verbosity=verbosity) + runner.log(msg, level) + self.assertEqual(stdout.getvalue(), f"{msg}\n" if output else "") + + def test_log_logger(self): + logger = logging.getLogger("test.logging") + cases = [ + (None, "INFO:test.logging:log message"), + # Test a low custom logging level. + (5, "Level 5:test.logging:log message"), + (logging.DEBUG, "DEBUG:test.logging:log message"), + (logging.INFO, "INFO:test.logging:log message"), + (logging.WARNING, "WARNING:test.logging:log message"), + # Test a high custom logging level. + (45, "Level 45:test.logging:log message"), + ] + for level, expected in cases: + with self.subTest(level=level): + runner = DiscoverRunner(logger=logger) + # Pass a logging level smaller than the smallest level in cases + # in order to capture all messages. 
+ with self.assertLogs("test.logging", level=1) as cm: + runner.log("log message", level) + self.assertEqual(cm.output, [expected]) + + def test_suite_result_with_failure(self): + cases = [ + (1, "FailureTestCase"), + (1, "ErrorTestCase"), + (0, "ExpectedFailureTestCase"), + (1, "UnexpectedSuccessTestCase"), + ] + runner = DiscoverRunner(verbosity=0) + for expected_failures, testcase in cases: + with self.subTest(testcase=testcase): + suite = runner.build_suite( + [ + f"test_runner_apps.failures.tests_failures.{testcase}", + ] + ) + with captured_stderr(): + result = runner.run_suite(suite) + failures = runner.suite_result(suite, result) + self.assertEqual(failures, expected_failures) + + +class DiscoverRunnerGetDatabasesTests(SimpleTestCase): + runner = DiscoverRunner(verbosity=2) + skip_msg = "Skipping setup of unused database(s): " + + def get_databases(self, test_labels): + with captured_stdout() as stdout: + suite = self.runner.build_suite(test_labels) + databases = self.runner.get_databases(suite) + return databases, stdout.getvalue() + + def assertSkippedDatabases(self, test_labels, expected_databases): + databases, output = self.get_databases(test_labels) + self.assertEqual(databases, expected_databases) + skipped_databases = set(connections) - set(expected_databases) + if skipped_databases: + self.assertIn(self.skip_msg + ", ".join(sorted(skipped_databases)), output) + else: + self.assertNotIn(self.skip_msg, output) + + def test_mixed(self): + databases, output = self.get_databases(["test_runner_apps.databases.tests"]) + self.assertEqual(databases, {"default": True, "other": False}) + self.assertNotIn(self.skip_msg, output) + + def test_all(self): + databases, output = self.get_databases( + ["test_runner_apps.databases.tests.AllDatabasesTests"] + ) + self.assertEqual(databases, {alias: False for alias in connections}) + self.assertNotIn(self.skip_msg, output) + + def test_default_and_other(self): + self.assertSkippedDatabases( + [ + 
"test_runner_apps.databases.tests.DefaultDatabaseTests", + "test_runner_apps.databases.tests.OtherDatabaseTests", + ], + {"default": False, "other": False}, + ) + + def test_default_only(self): + self.assertSkippedDatabases( + [ + "test_runner_apps.databases.tests.DefaultDatabaseTests", + ], + {"default": False}, + ) + + def test_other_only(self): + self.assertSkippedDatabases( + ["test_runner_apps.databases.tests.OtherDatabaseTests"], {"other": False} + ) + + def test_no_databases_required(self): + self.assertSkippedDatabases( + ["test_runner_apps.databases.tests.NoDatabaseTests"], {} + ) + + def test_serialize(self): + databases, _ = self.get_databases( + ["test_runner_apps.databases.tests.DefaultDatabaseSerializedTests"] + ) + self.assertEqual(databases, {"default": True}) diff --git a/testbed/django__django/tests/test_runner/test_parallel.py b/testbed/django__django/tests/test_runner/test_parallel.py new file mode 100644 index 0000000000000000000000000000000000000000..eea9e4de74ed87161848b312f22e01288570f7d3 --- /dev/null +++ b/testbed/django__django/tests/test_runner/test_parallel.py @@ -0,0 +1,135 @@ +import pickle +import sys +import unittest + +from django.test import SimpleTestCase +from django.test.runner import RemoteTestResult +from django.utils.version import PY311 + +try: + import tblib.pickling_support +except ImportError: + tblib = None + + +class ExceptionThatFailsUnpickling(Exception): + """ + After pickling, this class fails unpickling with an error about incorrect + arguments passed to __init__(). + """ + + def __init__(self, arg): + super().__init__() + + +class ParallelTestRunnerTest(SimpleTestCase): + """ + End-to-end tests of the parallel test runner. + + These tests are only meaningful when running tests in parallel using + the --parallel option, though it doesn't hurt to run them not in + parallel. + """ + + def test_subtest(self): + """ + Passing subtests work. 
+ """ + for i in range(2): + with self.subTest(index=i): + self.assertEqual(i, i) + + +class SampleFailingSubtest(SimpleTestCase): + # This method name doesn't begin with "test" to prevent test discovery + # from seeing it. + def dummy_test(self): + """ + A dummy test for testing subTest failures. + """ + for i in range(3): + with self.subTest(index=i): + self.assertEqual(i, 1) + + +class RemoteTestResultTest(SimpleTestCase): + def _test_error_exc_info(self): + try: + raise ValueError("woops") + except ValueError: + return sys.exc_info() + + def test_was_successful_no_events(self): + result = RemoteTestResult() + self.assertIs(result.wasSuccessful(), True) + + def test_was_successful_one_success(self): + result = RemoteTestResult() + result.addSuccess(None) + self.assertIs(result.wasSuccessful(), True) + + def test_was_successful_one_expected_failure(self): + result = RemoteTestResult() + result.addExpectedFailure(None, self._test_error_exc_info()) + self.assertIs(result.wasSuccessful(), True) + + def test_was_successful_one_skip(self): + result = RemoteTestResult() + result.addSkip(None, "Skipped") + self.assertIs(result.wasSuccessful(), True) + + @unittest.skipUnless(tblib is not None, "requires tblib to be installed") + def test_was_successful_one_error(self): + result = RemoteTestResult() + result.addError(None, self._test_error_exc_info()) + self.assertIs(result.wasSuccessful(), False) + + @unittest.skipUnless(tblib is not None, "requires tblib to be installed") + def test_was_successful_one_failure(self): + result = RemoteTestResult() + result.addFailure(None, self._test_error_exc_info()) + self.assertIs(result.wasSuccessful(), False) + + def test_picklable(self): + result = RemoteTestResult() + loaded_result = pickle.loads(pickle.dumps(result)) + self.assertEqual(result.events, loaded_result.events) + + def test_pickle_errors_detection(self): + picklable_error = RuntimeError("This is fine") + not_unpicklable_error = ExceptionThatFailsUnpickling("arg") + + 
result = RemoteTestResult() + result._confirm_picklable(picklable_error) + + msg = "__init__() missing 1 required positional argument" + with self.assertRaisesMessage(TypeError, msg): + result._confirm_picklable(not_unpicklable_error) + + @unittest.skipUnless(tblib is not None, "requires tblib to be installed") + def test_add_failing_subtests(self): + """ + Failing subtests are added correctly using addSubTest(). + """ + # Manually run a test with failing subtests to prevent the failures + # from affecting the actual test run. + result = RemoteTestResult() + subtest_test = SampleFailingSubtest(methodName="dummy_test") + subtest_test.run(result=result) + + events = result.events + self.assertEqual(len(events), 4) + self.assertIs(result.wasSuccessful(), False) + + event = events[1] + self.assertEqual(event[0], "addSubTest") + self.assertEqual( + str(event[2]), + "dummy_test (test_runner.test_parallel.SampleFailingSubtest%s) (index=0)" + # Python 3.11 uses fully qualified test name in the output. 
+ % (".dummy_test" if PY311 else ""), + ) + self.assertEqual(repr(event[3][1]), "AssertionError('0 != 1')") + + event = events[2] + self.assertEqual(repr(event[3][1]), "AssertionError('2 != 1')") diff --git a/testbed/django__django/tests/test_runner/test_shuffler.py b/testbed/django__django/tests/test_runner/test_shuffler.py new file mode 100644 index 0000000000000000000000000000000000000000..c6ecee5cb0d6ea87fdc3faa2c15b3b2747ac81bc --- /dev/null +++ b/testbed/django__django/tests/test_runner/test_shuffler.py @@ -0,0 +1,101 @@ +from unittest import mock + +from django.test import SimpleTestCase +from django.test.runner import Shuffler + + +class ShufflerTests(SimpleTestCase): + def test_hash_text(self): + actual = Shuffler._hash_text("abcd") + self.assertEqual(actual, "e2fc714c4727ee9395f324cd2e7f331f") + + def test_hash_text_hash_algorithm(self): + class MyShuffler(Shuffler): + hash_algorithm = "sha1" + + actual = MyShuffler._hash_text("abcd") + self.assertEqual(actual, "81fe8bfe87576c3ecb22426f8e57847382917acf") + + def test_init(self): + shuffler = Shuffler(100) + self.assertEqual(shuffler.seed, 100) + self.assertEqual(shuffler.seed_source, "given") + + def test_init_none_seed(self): + with mock.patch("random.randint", return_value=200): + shuffler = Shuffler(None) + self.assertEqual(shuffler.seed, 200) + self.assertEqual(shuffler.seed_source, "generated") + + def test_init_no_seed_argument(self): + with mock.patch("random.randint", return_value=300): + shuffler = Shuffler() + self.assertEqual(shuffler.seed, 300) + self.assertEqual(shuffler.seed_source, "generated") + + def test_seed_display(self): + shuffler = Shuffler(100) + shuffler.seed_source = "test" + self.assertEqual(shuffler.seed_display, "100 (test)") + + def test_hash_item_seed(self): + cases = [ + (1234, "64ad3fb166ddb41a2ca24f1803b8b722"), + # Passing a string gives the same value. 
+ ("1234", "64ad3fb166ddb41a2ca24f1803b8b722"), + (5678, "4dde450ad339b6ce45a0a2666e35b975"), + ] + for seed, expected in cases: + with self.subTest(seed=seed): + shuffler = Shuffler(seed=seed) + actual = shuffler._hash_item("abc", lambda x: x) + self.assertEqual(actual, expected) + + def test_hash_item_key(self): + cases = [ + (lambda x: x, "64ad3fb166ddb41a2ca24f1803b8b722"), + (lambda x: x.upper(), "ee22e8597bff91742affe4befbf4649a"), + ] + for key, expected in cases: + with self.subTest(key=key): + shuffler = Shuffler(seed=1234) + actual = shuffler._hash_item("abc", key) + self.assertEqual(actual, expected) + + def test_shuffle_key(self): + cases = [ + (lambda x: x, ["a", "d", "b", "c"]), + (lambda x: x.upper(), ["d", "c", "a", "b"]), + ] + for num, (key, expected) in enumerate(cases, start=1): + with self.subTest(num=num): + shuffler = Shuffler(seed=1234) + actual = shuffler.shuffle(["a", "b", "c", "d"], key) + self.assertEqual(actual, expected) + + def test_shuffle_consistency(self): + seq = [str(n) for n in range(5)] + cases = [ + (None, ["3", "0", "2", "4", "1"]), + (0, ["3", "2", "4", "1"]), + (1, ["3", "0", "2", "4"]), + (2, ["3", "0", "4", "1"]), + (3, ["0", "2", "4", "1"]), + (4, ["3", "0", "2", "1"]), + ] + shuffler = Shuffler(seed=1234) + for index, expected in cases: + with self.subTest(index=index): + if index is None: + new_seq = seq + else: + new_seq = seq.copy() + del new_seq[index] + actual = shuffler.shuffle(new_seq, lambda x: x) + self.assertEqual(actual, expected) + + def test_shuffle_same_hash(self): + shuffler = Shuffler(seed=1234) + msg = "item 'A' has same hash 'a56ce89262959e151ee2266552f1819c' as item 'a'" + with self.assertRaisesMessage(RuntimeError, msg): + shuffler.shuffle(["a", "b", "A"], lambda x: x.upper()) diff --git a/testbed/django__django/tests/test_runner/tests.py b/testbed/django__django/tests/test_runner/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..28145be3dbaf1e4b002b3464b1dbe32a9d8e0b86 
--- /dev/null +++ b/testbed/django__django/tests/test_runner/tests.py @@ -0,0 +1,1033 @@ +""" +Tests for django test runner +""" +import collections.abc +import multiprocessing +import os +import sys +import unittest +from unittest import mock + +from admin_scripts.tests import AdminScriptTestCase + +from django import db +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.core.management import call_command +from django.core.management.base import SystemCheckError +from django.test import SimpleTestCase, TransactionTestCase, skipUnlessDBFeature +from django.test.runner import ( + DiscoverRunner, + Shuffler, + _init_worker, + reorder_test_bin, + reorder_tests, + shuffle_tests, +) +from django.test.testcases import connections_support_transactions +from django.test.utils import ( + captured_stderr, + dependency_ordered, + get_unique_databases_and_mirrors, + iter_test_cases, +) + +from .models import B, Person, Through + + +class MySuite: + def __init__(self): + self.tests = [] + + def addTest(self, test): + self.tests.append(test) + + def __iter__(self): + yield from self.tests + + +class TestSuiteTests(SimpleTestCase): + def build_test_suite(self, test_classes, suite=None, suite_class=None): + if suite_class is None: + suite_class = unittest.TestSuite + if suite is None: + suite = suite_class() + + loader = unittest.defaultTestLoader + for test_class in test_classes: + tests = loader.loadTestsFromTestCase(test_class) + subsuite = suite_class() + # Only use addTest() to simplify testing a custom TestSuite. 
+ for test in tests: + subsuite.addTest(test) + suite.addTest(subsuite) + + return suite + + def make_test_suite(self, suite=None, suite_class=None): + class Tests1(unittest.TestCase): + def test1(self): + pass + + def test2(self): + pass + + class Tests2(unittest.TestCase): + def test1(self): + pass + + def test2(self): + pass + + return self.build_test_suite( + (Tests1, Tests2), + suite=suite, + suite_class=suite_class, + ) + + def assertTestNames(self, tests, expected): + # Each test.id() has a form like the following: + # "test_runner.tests.IterTestCasesTests.test_iter_test_cases..Tests1.test1". + # It suffices to check only the last two parts. + names = [".".join(test.id().split(".")[-2:]) for test in tests] + self.assertEqual(names, expected) + + def test_iter_test_cases_basic(self): + suite = self.make_test_suite() + tests = iter_test_cases(suite) + self.assertTestNames( + tests, + expected=[ + "Tests1.test1", + "Tests1.test2", + "Tests2.test1", + "Tests2.test2", + ], + ) + + def test_iter_test_cases_string_input(self): + msg = ( + "Test 'a' must be a test case or test suite not string (was found " + "in 'abc')." 
+ ) + with self.assertRaisesMessage(TypeError, msg): + list(iter_test_cases("abc")) + + def test_iter_test_cases_iterable_of_tests(self): + class Tests(unittest.TestCase): + def test1(self): + pass + + def test2(self): + pass + + tests = list(unittest.defaultTestLoader.loadTestsFromTestCase(Tests)) + actual_tests = iter_test_cases(tests) + self.assertTestNames( + actual_tests, + expected=[ + "Tests.test1", + "Tests.test2", + ], + ) + + def test_iter_test_cases_custom_test_suite_class(self): + suite = self.make_test_suite(suite_class=MySuite) + tests = iter_test_cases(suite) + self.assertTestNames( + tests, + expected=[ + "Tests1.test1", + "Tests1.test2", + "Tests2.test1", + "Tests2.test2", + ], + ) + + def test_iter_test_cases_mixed_test_suite_classes(self): + suite = self.make_test_suite(suite=MySuite()) + child_suite = list(suite)[0] + self.assertNotIsInstance(child_suite, MySuite) + tests = list(iter_test_cases(suite)) + self.assertEqual(len(tests), 4) + self.assertNotIsInstance(tests[0], unittest.TestSuite) + + def make_tests(self): + """Return an iterable of tests.""" + suite = self.make_test_suite() + return list(iter_test_cases(suite)) + + def test_shuffle_tests(self): + tests = self.make_tests() + # Choose a seed that shuffles both the classes and methods. 
+ shuffler = Shuffler(seed=9) + shuffled_tests = shuffle_tests(tests, shuffler) + self.assertIsInstance(shuffled_tests, collections.abc.Iterator) + self.assertTestNames( + shuffled_tests, + expected=[ + "Tests2.test1", + "Tests2.test2", + "Tests1.test2", + "Tests1.test1", + ], + ) + + def test_reorder_test_bin_no_arguments(self): + tests = self.make_tests() + reordered_tests = reorder_test_bin(tests) + self.assertIsInstance(reordered_tests, collections.abc.Iterator) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests1.test1", + "Tests1.test2", + "Tests2.test1", + "Tests2.test2", + ], + ) + + def test_reorder_test_bin_reverse(self): + tests = self.make_tests() + reordered_tests = reorder_test_bin(tests, reverse=True) + self.assertIsInstance(reordered_tests, collections.abc.Iterator) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests2.test2", + "Tests2.test1", + "Tests1.test2", + "Tests1.test1", + ], + ) + + def test_reorder_test_bin_random(self): + tests = self.make_tests() + # Choose a seed that shuffles both the classes and methods. + shuffler = Shuffler(seed=9) + reordered_tests = reorder_test_bin(tests, shuffler=shuffler) + self.assertIsInstance(reordered_tests, collections.abc.Iterator) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests2.test1", + "Tests2.test2", + "Tests1.test2", + "Tests1.test1", + ], + ) + + def test_reorder_test_bin_random_and_reverse(self): + tests = self.make_tests() + # Choose a seed that shuffles both the classes and methods. 
+ shuffler = Shuffler(seed=9) + reordered_tests = reorder_test_bin(tests, shuffler=shuffler, reverse=True) + self.assertIsInstance(reordered_tests, collections.abc.Iterator) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests1.test1", + "Tests1.test2", + "Tests2.test2", + "Tests2.test1", + ], + ) + + def test_reorder_tests_same_type_consecutive(self): + """Tests of the same type are made consecutive.""" + tests = self.make_tests() + # Move the last item to the front. + tests.insert(0, tests.pop()) + self.assertTestNames( + tests, + expected=[ + "Tests2.test2", + "Tests1.test1", + "Tests1.test2", + "Tests2.test1", + ], + ) + reordered_tests = reorder_tests(tests, classes=[]) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests2.test2", + "Tests2.test1", + "Tests1.test1", + "Tests1.test2", + ], + ) + + def test_reorder_tests_random(self): + tests = self.make_tests() + # Choose a seed that shuffles both the classes and methods. + shuffler = Shuffler(seed=9) + reordered_tests = reorder_tests(tests, classes=[], shuffler=shuffler) + self.assertIsInstance(reordered_tests, collections.abc.Iterator) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests2.test1", + "Tests2.test2", + "Tests1.test2", + "Tests1.test1", + ], + ) + + def test_reorder_tests_random_mixed_classes(self): + tests = self.make_tests() + # Move the last item to the front. 
+ tests.insert(0, tests.pop()) + shuffler = Shuffler(seed=9) + self.assertTestNames( + tests, + expected=[ + "Tests2.test2", + "Tests1.test1", + "Tests1.test2", + "Tests2.test1", + ], + ) + reordered_tests = reorder_tests(tests, classes=[], shuffler=shuffler) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests2.test1", + "Tests2.test2", + "Tests1.test2", + "Tests1.test1", + ], + ) + + def test_reorder_tests_reverse_with_duplicates(self): + class Tests1(unittest.TestCase): + def test1(self): + pass + + class Tests2(unittest.TestCase): + def test2(self): + pass + + def test3(self): + pass + + suite = self.build_test_suite((Tests1, Tests2)) + subsuite = list(suite)[0] + suite.addTest(subsuite) + tests = list(iter_test_cases(suite)) + self.assertTestNames( + tests, + expected=[ + "Tests1.test1", + "Tests2.test2", + "Tests2.test3", + "Tests1.test1", + ], + ) + reordered_tests = reorder_tests(tests, classes=[]) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests1.test1", + "Tests2.test2", + "Tests2.test3", + ], + ) + reordered_tests = reorder_tests(tests, classes=[], reverse=True) + self.assertTestNames( + reordered_tests, + expected=[ + "Tests2.test3", + "Tests2.test2", + "Tests1.test1", + ], + ) + + +class DependencyOrderingTests(unittest.TestCase): + def test_simple_dependencies(self): + raw = [ + ("s1", ("s1_db", ["alpha"])), + ("s2", ("s2_db", ["bravo"])), + ("s3", ("s3_db", ["charlie"])), + ] + dependencies = { + "alpha": ["charlie"], + "bravo": ["charlie"], + } + + ordered = dependency_ordered(raw, dependencies=dependencies) + ordered_sigs = [sig for sig, value in ordered] + + self.assertIn("s1", ordered_sigs) + self.assertIn("s2", ordered_sigs) + self.assertIn("s3", ordered_sigs) + self.assertLess(ordered_sigs.index("s3"), ordered_sigs.index("s1")) + self.assertLess(ordered_sigs.index("s3"), ordered_sigs.index("s2")) + + def test_chained_dependencies(self): + raw = [ + ("s1", ("s1_db", ["alpha"])), + ("s2", ("s2_db", ["bravo"])), + 
("s3", ("s3_db", ["charlie"])), + ] + dependencies = { + "alpha": ["bravo"], + "bravo": ["charlie"], + } + + ordered = dependency_ordered(raw, dependencies=dependencies) + ordered_sigs = [sig for sig, value in ordered] + + self.assertIn("s1", ordered_sigs) + self.assertIn("s2", ordered_sigs) + self.assertIn("s3", ordered_sigs) + + # Explicit dependencies + self.assertLess(ordered_sigs.index("s2"), ordered_sigs.index("s1")) + self.assertLess(ordered_sigs.index("s3"), ordered_sigs.index("s2")) + + # Implied dependencies + self.assertLess(ordered_sigs.index("s3"), ordered_sigs.index("s1")) + + def test_multiple_dependencies(self): + raw = [ + ("s1", ("s1_db", ["alpha"])), + ("s2", ("s2_db", ["bravo"])), + ("s3", ("s3_db", ["charlie"])), + ("s4", ("s4_db", ["delta"])), + ] + dependencies = { + "alpha": ["bravo", "delta"], + "bravo": ["charlie"], + "delta": ["charlie"], + } + + ordered = dependency_ordered(raw, dependencies=dependencies) + ordered_sigs = [sig for sig, aliases in ordered] + + self.assertIn("s1", ordered_sigs) + self.assertIn("s2", ordered_sigs) + self.assertIn("s3", ordered_sigs) + self.assertIn("s4", ordered_sigs) + + # Explicit dependencies + self.assertLess(ordered_sigs.index("s2"), ordered_sigs.index("s1")) + self.assertLess(ordered_sigs.index("s4"), ordered_sigs.index("s1")) + self.assertLess(ordered_sigs.index("s3"), ordered_sigs.index("s2")) + self.assertLess(ordered_sigs.index("s3"), ordered_sigs.index("s4")) + + # Implicit dependencies + self.assertLess(ordered_sigs.index("s3"), ordered_sigs.index("s1")) + + def test_circular_dependencies(self): + raw = [ + ("s1", ("s1_db", ["alpha"])), + ("s2", ("s2_db", ["bravo"])), + ] + dependencies = { + "bravo": ["alpha"], + "alpha": ["bravo"], + } + + with self.assertRaises(ImproperlyConfigured): + dependency_ordered(raw, dependencies=dependencies) + + def test_own_alias_dependency(self): + raw = [("s1", ("s1_db", ["alpha", "bravo"]))] + dependencies = {"alpha": ["bravo"]} + + with 
self.assertRaises(ImproperlyConfigured): + dependency_ordered(raw, dependencies=dependencies) + + # reordering aliases shouldn't matter + raw = [("s1", ("s1_db", ["bravo", "alpha"]))] + + with self.assertRaises(ImproperlyConfigured): + dependency_ordered(raw, dependencies=dependencies) + + +class MockTestRunner: + def __init__(self, *args, **kwargs): + if parallel := kwargs.get("parallel"): + sys.stderr.write(f"parallel={parallel}") + + +MockTestRunner.run_tests = mock.Mock(return_value=[]) + + +class ManageCommandTests(unittest.TestCase): + def test_custom_test_runner(self): + call_command("test", "sites", testrunner="test_runner.tests.MockTestRunner") + MockTestRunner.run_tests.assert_called_with(("sites",)) + + def test_bad_test_runner(self): + with self.assertRaises(AttributeError): + call_command("test", "sites", testrunner="test_runner.NonexistentRunner") + + def test_time_recorded(self): + with captured_stderr() as stderr: + call_command( + "test", + "--timing", + "sites", + testrunner="test_runner.tests.MockTestRunner", + ) + self.assertIn("Total run took", stderr.getvalue()) + + +# Isolate from the real environment. +@mock.patch.dict(os.environ, {}, clear=True) +@mock.patch.object(multiprocessing, "cpu_count", return_value=12) +class ManageCommandParallelTests(SimpleTestCase): + def test_parallel_default(self, *mocked_objects): + with captured_stderr() as stderr: + call_command( + "test", + "--parallel", + testrunner="test_runner.tests.MockTestRunner", + ) + self.assertIn("parallel=12", stderr.getvalue()) + + def test_parallel_auto(self, *mocked_objects): + with captured_stderr() as stderr: + call_command( + "test", + "--parallel=auto", + testrunner="test_runner.tests.MockTestRunner", + ) + self.assertIn("parallel=12", stderr.getvalue()) + + def test_no_parallel(self, *mocked_objects): + with captured_stderr() as stderr: + call_command("test", testrunner="test_runner.tests.MockTestRunner") + # Parallel is disabled by default. 
+ self.assertEqual(stderr.getvalue(), "") + + @mock.patch.object(multiprocessing, "get_start_method", return_value="spawn") + def test_parallel_spawn(self, *mocked_objects): + with captured_stderr() as stderr: + call_command( + "test", + "--parallel=auto", + testrunner="test_runner.tests.MockTestRunner", + ) + self.assertIn("parallel=1", stderr.getvalue()) + + @mock.patch.object(multiprocessing, "get_start_method", return_value="spawn") + def test_no_parallel_spawn(self, *mocked_objects): + with captured_stderr() as stderr: + call_command( + "test", + testrunner="test_runner.tests.MockTestRunner", + ) + self.assertEqual(stderr.getvalue(), "") + + @mock.patch.dict(os.environ, {"DJANGO_TEST_PROCESSES": "7"}) + def test_no_parallel_django_test_processes_env(self, *mocked_objects): + with captured_stderr() as stderr: + call_command("test", testrunner="test_runner.tests.MockTestRunner") + self.assertEqual(stderr.getvalue(), "") + + @mock.patch.dict(os.environ, {"DJANGO_TEST_PROCESSES": "invalid"}) + def test_django_test_processes_env_non_int(self, *mocked_objects): + with self.assertRaises(ValueError): + call_command( + "test", + "--parallel", + testrunner="test_runner.tests.MockTestRunner", + ) + + @mock.patch.dict(os.environ, {"DJANGO_TEST_PROCESSES": "7"}) + def test_django_test_processes_parallel_default(self, *mocked_objects): + for parallel in ["--parallel", "--parallel=auto"]: + with self.subTest(parallel=parallel): + with captured_stderr() as stderr: + call_command( + "test", + parallel, + testrunner="test_runner.tests.MockTestRunner", + ) + self.assertIn("parallel=7", stderr.getvalue()) + + +class CustomTestRunnerOptionsSettingsTests(AdminScriptTestCase): + """ + Custom runners can add command line arguments. The runner is specified + through a settings file. 
+ """ + + def setUp(self): + super().setUp() + settings = { + "TEST_RUNNER": "'test_runner.runner.CustomOptionsTestRunner'", + } + self.write_settings("settings.py", sdict=settings) + + def test_default_options(self): + args = ["test", "--settings=test_project.settings"] + out, err = self.run_django_admin(args) + self.assertNoOutput(err) + self.assertOutput(out, "1:2:3") + + def test_default_and_given_options(self): + args = ["test", "--settings=test_project.settings", "--option_b=foo"] + out, err = self.run_django_admin(args) + self.assertNoOutput(err) + self.assertOutput(out, "1:foo:3") + + def test_option_name_and_value_separated(self): + args = ["test", "--settings=test_project.settings", "--option_b", "foo"] + out, err = self.run_django_admin(args) + self.assertNoOutput(err) + self.assertOutput(out, "1:foo:3") + + def test_all_options_given(self): + args = [ + "test", + "--settings=test_project.settings", + "--option_a=bar", + "--option_b=foo", + "--option_c=31337", + ] + out, err = self.run_django_admin(args) + self.assertNoOutput(err) + self.assertOutput(out, "bar:foo:31337") + + +class CustomTestRunnerOptionsCmdlineTests(AdminScriptTestCase): + """ + Custom runners can add command line arguments when the runner is specified + using --testrunner. 
+ """ + + def setUp(self): + super().setUp() + self.write_settings("settings.py") + + def test_testrunner_option(self): + args = [ + "test", + "--testrunner", + "test_runner.runner.CustomOptionsTestRunner", + "--option_a=bar", + "--option_b=foo", + "--option_c=31337", + ] + out, err = self.run_django_admin(args, "test_project.settings") + self.assertNoOutput(err) + self.assertOutput(out, "bar:foo:31337") + + def test_testrunner_equals(self): + args = [ + "test", + "--testrunner=test_runner.runner.CustomOptionsTestRunner", + "--option_a=bar", + "--option_b=foo", + "--option_c=31337", + ] + out, err = self.run_django_admin(args, "test_project.settings") + self.assertNoOutput(err) + self.assertOutput(out, "bar:foo:31337") + + def test_no_testrunner(self): + args = ["test", "--testrunner"] + out, err = self.run_django_admin(args, "test_project.settings") + self.assertIn("usage", err) + self.assertNotIn("Traceback", err) + self.assertNoOutput(out) + + +class NoInitializeSuiteTestRunnerTests(SimpleTestCase): + @mock.patch.object(multiprocessing, "get_start_method", return_value="spawn") + @mock.patch( + "django.test.runner.ParallelTestSuite.initialize_suite", + side_effect=Exception("initialize_suite() is called."), + ) + def test_no_initialize_suite_test_runner(self, *mocked_objects): + """ + The test suite's initialize_suite() method must always be called when + using spawn. It cannot rely on a test runner implementation. 
+ """ + + class NoInitializeSuiteTestRunner(DiscoverRunner): + def setup_test_environment(self, **kwargs): + return + + def setup_databases(self, **kwargs): + return + + def run_checks(self, databases): + return + + def teardown_databases(self, old_config, **kwargs): + return + + def teardown_test_environment(self, **kwargs): + return + + def run_suite(self, suite, **kwargs): + kwargs = self.get_test_runner_kwargs() + runner = self.test_runner(**kwargs) + return runner.run(suite) + + with self.assertRaisesMessage(Exception, "initialize_suite() is called."): + runner = NoInitializeSuiteTestRunner( + verbosity=0, interactive=False, parallel=2 + ) + runner.run_tests( + [ + "test_runner_apps.sample.tests_sample.TestDjangoTestCase", + "test_runner_apps.simple.tests", + ] + ) + + +class TestRunnerInitializerTests(SimpleTestCase): + # Raise an exception to don't actually run tests. + @mock.patch.object( + multiprocessing, "Pool", side_effect=Exception("multiprocessing.Pool()") + ) + def test_no_initialize_suite_test_runner(self, mocked_pool): + class StubTestRunner(DiscoverRunner): + def setup_test_environment(self, **kwargs): + return + + def setup_databases(self, **kwargs): + return + + def run_checks(self, databases): + return + + def teardown_databases(self, old_config, **kwargs): + return + + def teardown_test_environment(self, **kwargs): + return + + def run_suite(self, suite, **kwargs): + kwargs = self.get_test_runner_kwargs() + runner = self.test_runner(**kwargs) + return runner.run(suite) + + runner = StubTestRunner( + verbosity=0, interactive=False, parallel=2, debug_mode=True + ) + with self.assertRaisesMessage(Exception, "multiprocessing.Pool()"): + runner.run_tests( + [ + "test_runner_apps.sample.tests_sample.TestDjangoTestCase", + "test_runner_apps.simple.tests", + ] + ) + # Initializer must be a function. 
+ self.assertIs(mocked_pool.call_args.kwargs["initializer"], _init_worker) + initargs = mocked_pool.call_args.kwargs["initargs"] + self.assertEqual(len(initargs), 6) + self.assertEqual(initargs[5], True) # debug_mode + + +class Ticket17477RegressionTests(AdminScriptTestCase): + def setUp(self): + super().setUp() + self.write_settings("settings.py") + + def test_ticket_17477(self): + """'manage.py help test' works after r16352.""" + args = ["help", "test"] + out, err = self.run_manage(args) + self.assertNoOutput(err) + + +class SQLiteInMemoryTestDbs(TransactionTestCase): + available_apps = ["test_runner"] + databases = {"default", "other"} + + @unittest.skipUnless( + all(db.connections[conn].vendor == "sqlite" for conn in db.connections), + "This is an sqlite-specific issue", + ) + def test_transaction_support(self): + # Assert connections mocking is appropriately applied by preventing + # any attempts at calling create_test_db on the global connection + # objects. + for connection in db.connections.all(): + create_test_db = mock.patch.object( + connection.creation, + "create_test_db", + side_effect=AssertionError( + "Global connection object shouldn't be manipulated." + ), + ) + create_test_db.start() + self.addCleanup(create_test_db.stop) + for option_key, option_value in ( + ("NAME", ":memory:"), + ("TEST", {"NAME": ":memory:"}), + ): + tested_connections = db.ConnectionHandler( + { + "default": { + "ENGINE": "django.db.backends.sqlite3", + option_key: option_value, + }, + "other": { + "ENGINE": "django.db.backends.sqlite3", + option_key: option_value, + }, + } + ) + with mock.patch("django.test.utils.connections", new=tested_connections): + other = tested_connections["other"] + DiscoverRunner(verbosity=0).setup_databases() + msg = ( + "DATABASES setting '%s' option set to sqlite3's ':memory:' value " + "shouldn't interfere with transaction support detection." + % option_key + ) + # Transaction support is properly initialized for the 'other' DB. 
+ self.assertTrue(other.features.supports_transactions, msg) + # And all the DBs report that they support transactions. + self.assertTrue(connections_support_transactions(), msg) + + +class DummyBackendTest(unittest.TestCase): + def test_setup_databases(self): + """ + setup_databases() doesn't fail with dummy database backend. + """ + tested_connections = db.ConnectionHandler({}) + with mock.patch("django.test.utils.connections", new=tested_connections): + runner_instance = DiscoverRunner(verbosity=0) + old_config = runner_instance.setup_databases() + runner_instance.teardown_databases(old_config) + + +class AliasedDefaultTestSetupTest(unittest.TestCase): + def test_setup_aliased_default_database(self): + """ + setup_databases() doesn't fail when 'default' is aliased + """ + tested_connections = db.ConnectionHandler( + {"default": {"NAME": "dummy"}, "aliased": {"NAME": "dummy"}} + ) + with mock.patch("django.test.utils.connections", new=tested_connections): + runner_instance = DiscoverRunner(verbosity=0) + old_config = runner_instance.setup_databases() + runner_instance.teardown_databases(old_config) + + +class SetupDatabasesTests(unittest.TestCase): + def setUp(self): + self.runner_instance = DiscoverRunner(verbosity=0) + + def test_setup_aliased_databases(self): + tested_connections = db.ConnectionHandler( + { + "default": { + "ENGINE": "django.db.backends.dummy", + "NAME": "dbname", + }, + "other": { + "ENGINE": "django.db.backends.dummy", + "NAME": "dbname", + }, + } + ) + + with mock.patch( + "django.db.backends.dummy.base.DatabaseWrapper.creation_class" + ) as mocked_db_creation: + with mock.patch("django.test.utils.connections", new=tested_connections): + old_config = self.runner_instance.setup_databases() + self.runner_instance.teardown_databases(old_config) + mocked_db_creation.return_value.destroy_test_db.assert_called_once_with( + "dbname", 0, False + ) + + def test_setup_test_database_aliases(self): + """ + The default database must be the first because 
data migrations + use the default alias by default. + """ + tested_connections = db.ConnectionHandler( + { + "other": { + "ENGINE": "django.db.backends.dummy", + "NAME": "dbname", + }, + "default": { + "ENGINE": "django.db.backends.dummy", + "NAME": "dbname", + }, + } + ) + with mock.patch("django.test.utils.connections", new=tested_connections): + test_databases, _ = get_unique_databases_and_mirrors() + self.assertEqual( + test_databases, + { + ("", "", "django.db.backends.dummy", "test_dbname"): ( + "dbname", + ["default", "other"], + ), + }, + ) + + def test_destroy_test_db_restores_db_name(self): + tested_connections = db.ConnectionHandler( + { + "default": { + "ENGINE": settings.DATABASES[db.DEFAULT_DB_ALIAS]["ENGINE"], + "NAME": "xxx_test_database", + }, + } + ) + # Using the real current name as old_name to not mess with the test suite. + old_name = settings.DATABASES[db.DEFAULT_DB_ALIAS]["NAME"] + with mock.patch("django.db.connections", new=tested_connections): + tested_connections["default"].creation.destroy_test_db( + old_name, verbosity=0, keepdb=True + ) + self.assertEqual( + tested_connections["default"].settings_dict["NAME"], old_name + ) + + def test_serialization(self): + tested_connections = db.ConnectionHandler( + { + "default": { + "ENGINE": "django.db.backends.dummy", + }, + } + ) + with mock.patch( + "django.db.backends.dummy.base.DatabaseWrapper.creation_class" + ) as mocked_db_creation: + with mock.patch("django.test.utils.connections", new=tested_connections): + self.runner_instance.setup_databases() + mocked_db_creation.return_value.create_test_db.assert_called_once_with( + verbosity=0, autoclobber=False, serialize=True, keepdb=False + ) + + +@skipUnlessDBFeature("supports_sequence_reset") +class AutoIncrementResetTest(TransactionTestCase): + """ + Creating the same models in different test methods receive the same PK + values since the sequences are reset before each test method. 
+ """ + + available_apps = ["test_runner"] + + reset_sequences = True + + def _test(self): + # Regular model + p = Person.objects.create(first_name="Jack", last_name="Smith") + self.assertEqual(p.pk, 1) + # Auto-created many-to-many through model + p.friends.add(Person.objects.create(first_name="Jacky", last_name="Smith")) + self.assertEqual(p.friends.through.objects.first().pk, 1) + # Many-to-many through model + b = B.objects.create() + t = Through.objects.create(person=p, b=b) + self.assertEqual(t.pk, 1) + + def test_autoincrement_reset1(self): + self._test() + + def test_autoincrement_reset2(self): + self._test() + + +class EmptyDefaultDatabaseTest(unittest.TestCase): + def test_empty_default_database(self): + """ + An empty default database in settings does not raise an ImproperlyConfigured + error when running a unit test that does not use a database. + """ + tested_connections = db.ConnectionHandler({"default": {}}) + with mock.patch("django.db.connections", new=tested_connections): + connection = tested_connections[db.utils.DEFAULT_DB_ALIAS] + self.assertEqual( + connection.settings_dict["ENGINE"], "django.db.backends.dummy" + ) + connections_support_transactions() + + +class RunTestsExceptionHandlingTests(unittest.TestCase): + def test_run_checks_raises(self): + """ + Teardown functions are run when run_checks() raises SystemCheckError. 
+ """ + with mock.patch( + "django.test.runner.DiscoverRunner.setup_test_environment" + ), mock.patch("django.test.runner.DiscoverRunner.setup_databases"), mock.patch( + "django.test.runner.DiscoverRunner.build_suite" + ), mock.patch( + "django.test.runner.DiscoverRunner.run_checks", side_effect=SystemCheckError + ), mock.patch( + "django.test.runner.DiscoverRunner.teardown_databases" + ) as teardown_databases, mock.patch( + "django.test.runner.DiscoverRunner.teardown_test_environment" + ) as teardown_test_environment: + runner = DiscoverRunner(verbosity=0, interactive=False) + with self.assertRaises(SystemCheckError): + runner.run_tests( + ["test_runner_apps.sample.tests_sample.TestDjangoTestCase"] + ) + self.assertTrue(teardown_databases.called) + self.assertTrue(teardown_test_environment.called) + + def test_run_checks_raises_and_teardown_raises(self): + """ + SystemCheckError is surfaced when run_checks() raises SystemCheckError + and teardown databases() raises ValueError. + """ + with mock.patch( + "django.test.runner.DiscoverRunner.setup_test_environment" + ), mock.patch("django.test.runner.DiscoverRunner.setup_databases"), mock.patch( + "django.test.runner.DiscoverRunner.build_suite" + ), mock.patch( + "django.test.runner.DiscoverRunner.run_checks", side_effect=SystemCheckError + ), mock.patch( + "django.test.runner.DiscoverRunner.teardown_databases", + side_effect=ValueError, + ) as teardown_databases, mock.patch( + "django.test.runner.DiscoverRunner.teardown_test_environment" + ) as teardown_test_environment: + runner = DiscoverRunner(verbosity=0, interactive=False) + with self.assertRaises(SystemCheckError): + runner.run_tests( + ["test_runner_apps.sample.tests_sample.TestDjangoTestCase"] + ) + self.assertTrue(teardown_databases.called) + self.assertFalse(teardown_test_environment.called) + + def test_run_checks_passes_and_teardown_raises(self): + """ + Exceptions on teardown are surfaced if no exceptions happen during + run_checks(). 
+ """ + with mock.patch( + "django.test.runner.DiscoverRunner.setup_test_environment" + ), mock.patch("django.test.runner.DiscoverRunner.setup_databases"), mock.patch( + "django.test.runner.DiscoverRunner.build_suite" + ), mock.patch( + "django.test.runner.DiscoverRunner.run_checks" + ), mock.patch( + "django.test.runner.DiscoverRunner.teardown_databases", + side_effect=ValueError, + ) as teardown_databases, mock.patch( + "django.test.runner.DiscoverRunner.teardown_test_environment" + ) as teardown_test_environment: + runner = DiscoverRunner(verbosity=0, interactive=False) + with self.assertRaises(ValueError): + # Suppress the output when running TestDjangoTestCase. + with mock.patch("sys.stderr"): + runner.run_tests( + ["test_runner_apps.sample.tests_sample.TestDjangoTestCase"] + ) + self.assertTrue(teardown_databases.called) + self.assertFalse(teardown_test_environment.called) diff --git a/testbed/django__django/tests/test_runner_apps/__init__.py b/testbed/django__django/tests/test_runner_apps/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/buffer/tests_buffer.py b/testbed/django__django/tests/test_runner_apps/buffer/tests_buffer.py new file mode 100644 index 0000000000000000000000000000000000000000..00d959cd493e74532955149beec1a71354ed359c --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/buffer/tests_buffer.py @@ -0,0 +1,14 @@ +import sys +from unittest import TestCase + + +class WriteToStdoutStderrTestCase(TestCase): + def test_pass(self): + sys.stderr.write("Write to stderr.") + sys.stdout.write("Write to stdout.") + self.assertTrue(True) + + def test_fail(self): + sys.stderr.write("Write to stderr.") + sys.stdout.write("Write to stdout.") + self.assertTrue(False) diff --git a/testbed/django__django/tests/test_runner_apps/databases/__init__.py b/testbed/django__django/tests/test_runner_apps/databases/__init__.py 
new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/databases/tests.py b/testbed/django__django/tests/test_runner_apps/databases/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..b3c85564a197039d5578fd947ea343e7a9a766c3 --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/databases/tests.py @@ -0,0 +1,23 @@ +import unittest + + +class NoDatabaseTests(unittest.TestCase): + def test_nothing(self): + pass + + +class DefaultDatabaseTests(NoDatabaseTests): + databases = {"default"} + + +class DefaultDatabaseSerializedTests(NoDatabaseTests): + databases = {"default"} + serialized_rollback = True + + +class OtherDatabaseTests(NoDatabaseTests): + databases = {"other"} + + +class AllDatabasesTests(NoDatabaseTests): + databases = "__all__" diff --git a/testbed/django__django/tests/test_runner_apps/failures/__init__.py b/testbed/django__django/tests/test_runner_apps/failures/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/failures/tests_failures.py b/testbed/django__django/tests/test_runner_apps/failures/tests_failures.py new file mode 100644 index 0000000000000000000000000000000000000000..dad875b8a61d2994fba60c8676e98b24103652ba --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/failures/tests_failures.py @@ -0,0 +1,23 @@ +from unittest import TestCase, expectedFailure + + +class FailureTestCase(TestCase): + def test_sample(self): + self.assertEqual(0, 1) + + +class ErrorTestCase(TestCase): + def test_sample(self): + raise Exception("test") + + +class ExpectedFailureTestCase(TestCase): + @expectedFailure + def test_sample(self): + self.assertEqual(0, 1) + + +class UnexpectedSuccessTestCase(TestCase): + @expectedFailure + def test_sample(self): + self.assertEqual(1, 1) diff 
--git a/testbed/django__django/tests/test_runner_apps/sample/__init__.py b/testbed/django__django/tests/test_runner_apps/sample/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/sample/doctests.py b/testbed/django__django/tests/test_runner_apps/sample/doctests.py new file mode 100644 index 0000000000000000000000000000000000000000..a657d9bdf30aded07f62d90949c7eae91113b50e --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/sample/doctests.py @@ -0,0 +1,47 @@ +""" +Doctest example from the official Python documentation. +https://docs.python.org/library/doctest.html +""" + + +def factorial(n): + """Return the factorial of n, an exact integer >= 0. + + >>> [factorial(n) for n in range(6)] + [1, 1, 2, 6, 24, 120] + >>> factorial(30) # doctest: +ELLIPSIS + 265252859812191058636308480000000... + >>> factorial(-1) + Traceback (most recent call last): + ... + ValueError: n must be >= 0 + + Factorials of floats are OK, but the float must be an exact integer: + >>> factorial(30.1) + Traceback (most recent call last): + ... + ValueError: n must be exact integer + >>> factorial(30.0) # doctest: +ELLIPSIS + 265252859812191058636308480000000... + + It must also not be ridiculously large: + >>> factorial(1e100) + Traceback (most recent call last): + ... 
+ OverflowError: n too large + """ + + import math + + if not n >= 0: + raise ValueError("n must be >= 0") + if math.floor(n) != n: + raise ValueError("n must be exact integer") + if n + 1 == n: # catch a value like 1e300 + raise OverflowError("n too large") + result = 1 + factor = 2 + while factor <= n: + result *= factor + factor += 1 + return result diff --git a/testbed/django__django/tests/test_runner_apps/sample/empty.py b/testbed/django__django/tests/test_runner_apps/sample/empty.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/sample/pattern_tests.py b/testbed/django__django/tests/test_runner_apps/sample/pattern_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..4500eacb8ee100d4a46b2375ce637197f65870d0 --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/sample/pattern_tests.py @@ -0,0 +1,6 @@ +from unittest import TestCase + + +class Test(TestCase): + def test_sample(self): + self.assertEqual(1, 1) diff --git a/testbed/django__django/tests/test_runner_apps/sample/tests/__init__.py b/testbed/django__django/tests/test_runner_apps/sample/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/sample/tests/tests.py b/testbed/django__django/tests/test_runner_apps/sample/tests/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..a9faf514f2cb83ce498c49220f51d8e3769f1400 --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/sample/tests/tests.py @@ -0,0 +1,6 @@ +from unittest import TestCase + + +class Test(TestCase): + def test_sample(self): + pass diff --git a/testbed/django__django/tests/test_runner_apps/sample/tests_sample.py b/testbed/django__django/tests/test_runner_apps/sample/tests_sample.py new file mode 100644 index 
0000000000000000000000000000000000000000..6e876ebd41f2071a7cfa4d9d701f323efa38e02f --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/sample/tests_sample.py @@ -0,0 +1,33 @@ +import doctest +from unittest import TestCase + +from django.test import SimpleTestCase +from django.test import TestCase as DjangoTestCase + +from . import doctests + + +class TestVanillaUnittest(TestCase): + def test_sample(self): + self.assertEqual(1, 1) + + +class TestDjangoTestCase(DjangoTestCase): + def test_sample(self): + self.assertEqual(1, 1) + + +class TestZimpleTestCase(SimpleTestCase): + # Z is used to trick this test case to appear after Vanilla in default suite + + def test_sample(self): + self.assertEqual(1, 1) + + +class EmptyTestCase(TestCase): + pass + + +def load_tests(loader, tests, ignore): + tests.addTests(doctest.DocTestSuite(doctests)) + return tests diff --git a/testbed/django__django/tests/test_runner_apps/simple/__init__.py b/testbed/django__django/tests/test_runner_apps/simple/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/simple/tests.py b/testbed/django__django/tests/test_runner_apps/simple/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..62611bee3228b2e273dbc4c4be4a2e865330d0ed --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/simple/tests.py @@ -0,0 +1,55 @@ +from unittest import TestCase + +from django.test import SimpleTestCase +from django.test import TestCase as DjangoTestCase + + +class DjangoCase1(DjangoTestCase): + def test_1(self): + pass + + def test_2(self): + pass + + +class DjangoCase2(DjangoTestCase): + def test_1(self): + pass + + def test_2(self): + pass + + +class SimpleCase1(SimpleTestCase): + def test_1(self): + pass + + def test_2(self): + pass + + +class SimpleCase2(SimpleTestCase): + def test_1(self): + pass + + def test_2(self): + pass + + +class 
UnittestCase1(TestCase): + def test_1(self): + pass + + def test_2(self): + pass + + +class UnittestCase2(TestCase): + def test_1(self): + pass + + def test_2(self): + pass + + def test_3_test(self): + pass diff --git a/testbed/django__django/tests/test_runner_apps/tagged/__init__.py b/testbed/django__django/tests/test_runner_apps/tagged/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_runner_apps/tagged/tests.py b/testbed/django__django/tests/test_runner_apps/tagged/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..7b61b9321e4b7d16b123c03d32f737553671fa0b --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/tagged/tests.py @@ -0,0 +1,14 @@ +from unittest import TestCase + +from django.test import tag + + +@tag("slow") +class TaggedTestCase(TestCase): + @tag("fast") + def test_single_tag(self): + self.assertEqual(1, 1) + + @tag("fast", "core") + def test_multiple_tags(self): + self.assertEqual(1, 1) diff --git a/testbed/django__django/tests/test_runner_apps/tagged/tests_inheritance.py b/testbed/django__django/tests/test_runner_apps/tagged/tests_inheritance.py new file mode 100644 index 0000000000000000000000000000000000000000..80545c9125739e222049be2f9be5d981119f01bc --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/tagged/tests_inheritance.py @@ -0,0 +1,27 @@ +from unittest import TestCase + +from django.test import tag + + +@tag("foo") +class FooBase(TestCase): + pass + + +class Foo(FooBase): + def test_no_new_tags(self): + pass + + @tag("baz") + def test_new_func_tag(self): + pass + + +@tag("bar") +class FooBar(FooBase): + def test_new_class_tag_only(self): + pass + + @tag("baz") + def test_new_class_and_func_tags(self): + pass diff --git a/testbed/django__django/tests/test_runner_apps/tagged/tests_syntax_error.py 
b/testbed/django__django/tests/test_runner_apps/tagged/tests_syntax_error.py new file mode 100644 index 0000000000000000000000000000000000000000..2714768a308548270c46c7d92954db2e23de5663 --- /dev/null +++ b/testbed/django__django/tests/test_runner_apps/tagged/tests_syntax_error.py @@ -0,0 +1,11 @@ +from unittest import TestCase + +from django.test import tag + + +@tag('syntax_error') +class SyntaxErrorTestCase(TestCase): + pass + + +1syntax_error # NOQA diff --git a/testbed/django__django/tests/test_sqlite.py b/testbed/django__django/tests/test_sqlite.py new file mode 100644 index 0000000000000000000000000000000000000000..f3ccf2c021770c7d1d5138da26fed2a91c0bed3a --- /dev/null +++ b/testbed/django__django/tests/test_sqlite.py @@ -0,0 +1,33 @@ +# This is an example test settings file for use with the Django test suite. +# +# The 'sqlite3' backend requires only the ENGINE setting (an in- +# memory database will be used). All other backends will require a +# NAME and potentially authentication information. See the +# following section in the docs for more information: +# +# https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/ +# +# The different databases that Django supports behave differently in certain +# situations, so it is recommended to run the test suite against as many +# database backends as possible. You may want to create a separate settings +# file for each of the backends you test against. + +DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + }, + "other": { + "ENGINE": "django.db.backends.sqlite3", + }, +} + +SECRET_KEY = "django_tests_secret_key" + +# Use a fast hasher to speed up tests. 
+PASSWORD_HASHERS = [ + "django.contrib.auth.hashers.MD5PasswordHasher", +] + +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" + +USE_TZ = False diff --git a/testbed/django__django/tests/test_utils/__init__.py b/testbed/django__django/tests/test_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_utils/fixtures/should_not_be_loaded.json b/testbed/django__django/tests/test_utils/fixtures/should_not_be_loaded.json new file mode 100644 index 0000000000000000000000000000000000000000..9f4df38996db15f93f43dd92ab90a62f04d4b36d --- /dev/null +++ b/testbed/django__django/tests/test_utils/fixtures/should_not_be_loaded.json @@ -0,0 +1,10 @@ +[ + { + "pk": 1, + "model": "test_utils.person", + "fields": { + "name": "Elvis Presley" + } + } +] + diff --git a/testbed/django__django/tests/test_utils/models.py b/testbed/django__django/tests/test_utils/models.py new file mode 100644 index 0000000000000000000000000000000000000000..4d91c03a03904d3becf4fb2a00c763b4d76efd96 --- /dev/null +++ b/testbed/django__django/tests/test_utils/models.py @@ -0,0 +1,18 @@ +from django.db import models + + +class Car(models.Model): + name = models.CharField(max_length=100) + + +class Person(models.Model): + name = models.CharField(max_length=100) + cars = models.ManyToManyField(Car, through="PossessedCar") + data = models.BinaryField(null=True) + + +class PossessedCar(models.Model): + car = models.ForeignKey(Car, models.CASCADE) + belongs_to = models.ForeignKey( + Person, models.CASCADE, related_name="possessed_cars" + ) diff --git a/testbed/django__django/tests/test_utils/templates/template_used/alternative.html b/testbed/django__django/tests/test_utils/templates/template_used/alternative.html new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/testbed/django__django/tests/test_utils/templates/template_used/base.html b/testbed/django__django/tests/test_utils/templates/template_used/base.html new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/test_utils/templates/template_used/extends.html b/testbed/django__django/tests/test_utils/templates/template_used/extends.html new file mode 100644 index 0000000000000000000000000000000000000000..d14bfa27790e5683c18c054bf28d47c0247119cc --- /dev/null +++ b/testbed/django__django/tests/test_utils/templates/template_used/extends.html @@ -0,0 +1 @@ +{% extends "template_used/base.html" %} diff --git a/testbed/django__django/tests/test_utils/templates/template_used/include.html b/testbed/django__django/tests/test_utils/templates/template_used/include.html new file mode 100644 index 0000000000000000000000000000000000000000..2d6c954f3843fa189de12cbeb520c6c582917723 --- /dev/null +++ b/testbed/django__django/tests/test_utils/templates/template_used/include.html @@ -0,0 +1 @@ +{% include "template_used/base.html" %} diff --git a/testbed/django__django/tests/test_utils/test_serializemixin.py b/testbed/django__django/tests/test_utils/test_serializemixin.py new file mode 100644 index 0000000000000000000000000000000000000000..e15334a867c0ba0b1896835a958c001f221e5ae9 --- /dev/null +++ b/testbed/django__django/tests/test_utils/test_serializemixin.py @@ -0,0 +1,22 @@ +from django.test import SimpleTestCase +from django.test.testcases import SerializeMixin + + +class TestSerializeMixin(SimpleTestCase): + def test_init_without_lockfile(self): + msg = ( + "ExampleTests.lockfile isn't set. Set it to a unique value in the " + "base class." 
+ ) + with self.assertRaisesMessage(ValueError, msg): + + class ExampleTests(SerializeMixin, SimpleTestCase): + pass + + +class TestSerializeMixinUse(SerializeMixin, SimpleTestCase): + lockfile = __file__ + + def test_usage(self): + # Running this test ensures that the lock/unlock functions have passed. + pass diff --git a/testbed/django__django/tests/test_utils/test_simpletestcase.py b/testbed/django__django/tests/test_utils/test_simpletestcase.py new file mode 100644 index 0000000000000000000000000000000000000000..c6aa31494445423cdf9250b55b2e3caa45220016 --- /dev/null +++ b/testbed/django__django/tests/test_utils/test_simpletestcase.py @@ -0,0 +1,147 @@ +import unittest +from io import StringIO +from unittest import mock +from unittest.suite import _DebugResult + +from django.test import SimpleTestCase + + +class ErrorTestCase(SimpleTestCase): + def raising_test(self): + self._pre_setup.assert_called_once_with() + raise Exception("debug() bubbles up exceptions before cleanup.") + + def simple_test(self): + self._pre_setup.assert_called_once_with() + + @unittest.skip("Skip condition.") + def skipped_test(self): + pass + + +@mock.patch.object(ErrorTestCase, "_post_teardown") +@mock.patch.object(ErrorTestCase, "_pre_setup") +class DebugInvocationTests(SimpleTestCase): + def get_runner(self): + return unittest.TextTestRunner(stream=StringIO()) + + def isolate_debug_test(self, test_suite, result): + # Suite teardown needs to be manually called to isolate failures. 
+ test_suite._tearDownPreviousClass(None, result) + test_suite._handleModuleTearDown(result) + + def test_run_cleanup(self, _pre_setup, _post_teardown): + """Simple test run: catches errors and runs cleanup.""" + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("raising_test")) + result = self.get_runner()._makeResult() + self.assertEqual(result.errors, []) + test_suite.run(result) + self.assertEqual(len(result.errors), 1) + _, traceback = result.errors[0] + self.assertIn( + "Exception: debug() bubbles up exceptions before cleanup.", traceback + ) + _pre_setup.assert_called_once_with() + _post_teardown.assert_called_once_with() + + def test_run_pre_setup_error(self, _pre_setup, _post_teardown): + _pre_setup.side_effect = Exception("Exception in _pre_setup.") + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("simple_test")) + result = self.get_runner()._makeResult() + self.assertEqual(result.errors, []) + test_suite.run(result) + self.assertEqual(len(result.errors), 1) + _, traceback = result.errors[0] + self.assertIn("Exception: Exception in _pre_setup.", traceback) + # pre-setup is called but not post-teardown. + _pre_setup.assert_called_once_with() + self.assertFalse(_post_teardown.called) + + def test_run_post_teardown_error(self, _pre_setup, _post_teardown): + _post_teardown.side_effect = Exception("Exception in _post_teardown.") + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("simple_test")) + result = self.get_runner()._makeResult() + self.assertEqual(result.errors, []) + test_suite.run(result) + self.assertEqual(len(result.errors), 1) + _, traceback = result.errors[0] + self.assertIn("Exception: Exception in _post_teardown.", traceback) + # pre-setup and post-teardwn are called. 
+ _pre_setup.assert_called_once_with() + _post_teardown.assert_called_once_with() + + def test_run_skipped_test_no_cleanup(self, _pre_setup, _post_teardown): + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("skipped_test")) + try: + test_suite.run(self.get_runner()._makeResult()) + except unittest.SkipTest: + self.fail("SkipTest should not be raised at this stage.") + self.assertFalse(_post_teardown.called) + self.assertFalse(_pre_setup.called) + + def test_debug_cleanup(self, _pre_setup, _post_teardown): + """Simple debug run without errors.""" + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("simple_test")) + test_suite.debug() + _pre_setup.assert_called_once_with() + _post_teardown.assert_called_once_with() + + def test_debug_bubbles_error(self, _pre_setup, _post_teardown): + """debug() bubbles up exceptions before cleanup.""" + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("raising_test")) + msg = "debug() bubbles up exceptions before cleanup." + with self.assertRaisesMessage(Exception, msg): + # This is the same as test_suite.debug(). + result = _DebugResult() + test_suite.run(result, debug=True) + # pre-setup is called but not post-teardown. + _pre_setup.assert_called_once_with() + self.assertFalse(_post_teardown.called) + self.isolate_debug_test(test_suite, result) + + def test_debug_bubbles_pre_setup_error(self, _pre_setup, _post_teardown): + """debug() bubbles up exceptions during _pre_setup.""" + msg = "Exception in _pre_setup." + _pre_setup.side_effect = Exception(msg) + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("simple_test")) + with self.assertRaisesMessage(Exception, msg): + # This is the same as test_suite.debug(). + result = _DebugResult() + test_suite.run(result, debug=True) + # pre-setup is called but not post-teardown. 
+ _pre_setup.assert_called_once_with() + self.assertFalse(_post_teardown.called) + self.isolate_debug_test(test_suite, result) + + def test_debug_bubbles_post_teardown_error(self, _pre_setup, _post_teardown): + """debug() bubbles up exceptions during _post_teardown.""" + msg = "Exception in _post_teardown." + _post_teardown.side_effect = Exception(msg) + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("simple_test")) + with self.assertRaisesMessage(Exception, msg): + # This is the same as test_suite.debug(). + result = _DebugResult() + test_suite.run(result, debug=True) + # pre-setup and post-teardwn are called. + _pre_setup.assert_called_once_with() + _post_teardown.assert_called_once_with() + self.isolate_debug_test(test_suite, result) + + def test_debug_skipped_test_no_cleanup(self, _pre_setup, _post_teardown): + test_suite = unittest.TestSuite() + test_suite.addTest(ErrorTestCase("skipped_test")) + with self.assertRaisesMessage(unittest.SkipTest, "Skip condition."): + # This is the same as test_suite.debug(). 
+ result = _DebugResult() + test_suite.run(result, debug=True) + self.assertFalse(_post_teardown.called) + self.assertFalse(_pre_setup.called) + self.isolate_debug_test(test_suite, result) diff --git a/testbed/django__django/tests/test_utils/test_testcase.py b/testbed/django__django/tests/test_utils/test_testcase.py new file mode 100644 index 0000000000000000000000000000000000000000..eb6ca80036936ee5e0bf87f96917334f0327c716 --- /dev/null +++ b/testbed/django__django/tests/test_utils/test_testcase.py @@ -0,0 +1,150 @@ +from functools import wraps + +from django.db import IntegrityError, connections, transaction +from django.test import TestCase, skipUnlessDBFeature +from django.test.testcases import DatabaseOperationForbidden, TestData + +from .models import Car, Person, PossessedCar + + +class TestTestCase(TestCase): + @skipUnlessDBFeature("can_defer_constraint_checks") + @skipUnlessDBFeature("supports_foreign_keys") + def test_fixture_teardown_checks_constraints(self): + rollback_atomics = self._rollback_atomics + self._rollback_atomics = lambda connection: None # noop + try: + car = PossessedCar.objects.create(car_id=1, belongs_to_id=1) + with self.assertRaises(IntegrityError), transaction.atomic(): + self._fixture_teardown() + car.delete() + finally: + self._rollback_atomics = rollback_atomics + + def test_disallowed_database_connection(self): + message = ( + "Database connections to 'other' are not allowed in this test. " + "Add 'other' to test_utils.test_testcase.TestTestCase.databases to " + "ensure proper test isolation and silence this failure." + ) + with self.assertRaisesMessage(DatabaseOperationForbidden, message): + connections["other"].connect() + with self.assertRaisesMessage(DatabaseOperationForbidden, message): + connections["other"].temporary_connection() + + def test_disallowed_database_queries(self): + message = ( + "Database queries to 'other' are not allowed in this test. 
" + "Add 'other' to test_utils.test_testcase.TestTestCase.databases to " + "ensure proper test isolation and silence this failure." + ) + with self.assertRaisesMessage(DatabaseOperationForbidden, message): + Car.objects.using("other").get() + + @skipUnlessDBFeature("supports_transactions") + def test_reset_sequences(self): + old_reset_sequences = self.reset_sequences + self.reset_sequences = True + msg = "reset_sequences cannot be used on TestCase instances" + try: + with self.assertRaisesMessage(TypeError, msg): + self._fixture_setup() + finally: + self.reset_sequences = old_reset_sequences + + +def assert_no_queries(test): + @wraps(test) + def inner(self): + with self.assertNumQueries(0): + test(self) + + return inner + + +# On databases with no transaction support (for instance, MySQL with the MyISAM +# engine), setUpTestData() is called before each test, so there is no need to +# clone class level test data. +@skipUnlessDBFeature("supports_transactions") +class TestDataTests(TestCase): + # setUpTestData re-assignment are also wrapped in TestData. + jim_douglas = None + + @classmethod + def setUpTestData(cls): + cls.jim_douglas = Person.objects.create(name="Jim Douglas") + cls.car = Car.objects.create(name="1963 Volkswagen Beetle") + cls.herbie = cls.jim_douglas.possessed_cars.create( + car=cls.car, + belongs_to=cls.jim_douglas, + ) + + cls.person_binary = Person.objects.create(name="Person", data=b"binary data") + cls.person_binary_get = Person.objects.get(pk=cls.person_binary.pk) + + @assert_no_queries + def test_class_attribute_equality(self): + """Class level test data is equal to instance level test data.""" + self.assertEqual(self.jim_douglas, self.__class__.jim_douglas) + self.assertEqual(self.person_binary, self.__class__.person_binary) + self.assertEqual(self.person_binary_get, self.__class__.person_binary_get) + + @assert_no_queries + def test_class_attribute_identity(self): + """ + Class level test data is not identical to instance level test data. 
+ """ + self.assertIsNot(self.jim_douglas, self.__class__.jim_douglas) + self.assertIsNot(self.person_binary, self.__class__.person_binary) + self.assertIsNot(self.person_binary_get, self.__class__.person_binary_get) + + @assert_no_queries + def test_binaryfield_data_type(self): + self.assertEqual(bytes(self.person_binary.data), b"binary data") + self.assertEqual(bytes(self.person_binary_get.data), b"binary data") + self.assertEqual( + type(self.person_binary_get.data), + type(self.__class__.person_binary_get.data), + ) + self.assertEqual( + type(self.person_binary.data), + type(self.__class__.person_binary.data), + ) + + @assert_no_queries + def test_identity_preservation(self): + """Identity of test data is preserved between accesses.""" + self.assertIs(self.jim_douglas, self.jim_douglas) + + @assert_no_queries + def test_known_related_objects_identity_preservation(self): + """Known related objects identity is preserved.""" + self.assertIs(self.herbie.car, self.car) + self.assertIs(self.herbie.belongs_to, self.jim_douglas) + + def test_repr(self): + self.assertEqual( + repr(TestData("attr", "value")), + "", + ) + + +class SetupTestDataIsolationTests(TestCase): + """ + In-memory data isolation is respected for model instances assigned to class + attributes during setUpTestData. 
+ """ + + @classmethod + def setUpTestData(cls): + cls.car = Car.objects.create(name="Volkswagen Beetle") + + def test_book_name_deutsh(self): + self.assertEqual(self.car.name, "Volkswagen Beetle") + self.car.name = "VW sKäfer" + self.car.save() + + def test_book_name_french(self): + self.assertEqual(self.car.name, "Volkswagen Beetle") + self.car.name = "Volkswagen Coccinelle" + self.car.save() diff --git a/testbed/django__django/tests/test_utils/test_transactiontestcase.py b/testbed/django__django/tests/test_utils/test_transactiontestcase.py new file mode 100644 index 0000000000000000000000000000000000000000..0032e2ee0c5fcf6810eff1ee6ff6e39be10b2507 --- /dev/null +++ b/testbed/django__django/tests/test_utils/test_transactiontestcase.py @@ -0,0 +1,70 @@ +from unittest import mock + +from django.db import connections +from django.test import TestCase, TransactionTestCase, override_settings +from django.test.testcases import DatabaseOperationForbidden + +from .models import Car + + +class TestSerializedRollbackInhibitsPostMigrate(TransactionTestCase): + """ + TransactionTestCase._fixture_teardown() inhibits the post_migrate signal + for test classes with serialized_rollback=True. + """ + + available_apps = ["test_utils"] + serialized_rollback = True + + def setUp(self): + # self.available_apps must be None to test the serialized_rollback + # condition. + self.available_apps = None + + def tearDown(self): + self.available_apps = ["test_utils"] + + @mock.patch("django.test.testcases.call_command") + def test(self, call_command): + # with a mocked call_command(), this doesn't have any effect. 
+ self._fixture_teardown() + call_command.assert_called_with( + "flush", + interactive=False, + allow_cascade=False, + reset_sequences=False, + inhibit_post_migrate=True, + database="default", + verbosity=0, + ) + + +@override_settings(DEBUG=True) # Enable query logging for test_queries_cleared +class TransactionTestCaseDatabasesTests(TestCase): + available_apps = [] + databases = {"default", "other"} + + def test_queries_cleared(self): + """ + TransactionTestCase._pre_setup() clears the connections' queries_log + so that it's less likely to overflow. An overflow causes + assertNumQueries() to fail. + """ + for alias in self.databases: + self.assertEqual( + len(connections[alias].queries_log), 0, "Failed for alias %s" % alias + ) + + +class DisallowedDatabaseQueriesTests(TransactionTestCase): + available_apps = ["test_utils"] + + def test_disallowed_database_queries(self): + message = ( + "Database queries to 'other' are not allowed in this test. " + "Add 'other' to test_utils.test_transactiontestcase." + "DisallowedDatabaseQueriesTests.databases to ensure proper test " + "isolation and silence this failure." 
+ ) + with self.assertRaisesMessage(DatabaseOperationForbidden, message): + Car.objects.using("other").get() diff --git a/testbed/django__django/tests/test_utils/tests.py b/testbed/django__django/tests/test_utils/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..25218299b24ed7f2eda84fbc3b47635cc6c72873 --- /dev/null +++ b/testbed/django__django/tests/test_utils/tests.py @@ -0,0 +1,2180 @@ +import os +import unittest +import warnings +from io import StringIO +from unittest import mock + +from django.conf import STATICFILES_STORAGE_ALIAS, settings +from django.contrib.staticfiles.finders import get_finder, get_finders +from django.contrib.staticfiles.storage import staticfiles_storage +from django.core.exceptions import ImproperlyConfigured +from django.core.files.storage import default_storage +from django.db import ( + IntegrityError, + connection, + connections, + models, + router, + transaction, +) +from django.forms import ( + CharField, + EmailField, + Form, + IntegerField, + ValidationError, + formset_factory, +) +from django.http import HttpResponse +from django.template.loader import render_to_string +from django.test import ( + SimpleTestCase, + TestCase, + TransactionTestCase, + skipIfDBFeature, + skipUnlessDBFeature, +) +from django.test.html import HTMLParseError, parse_html +from django.test.testcases import DatabaseOperationForbidden +from django.test.utils import ( + CaptureQueriesContext, + TestContextDecorator, + ignore_warnings, + isolate_apps, + override_settings, + setup_test_environment, +) +from django.urls import NoReverseMatch, path, reverse, reverse_lazy +from django.utils.deprecation import RemovedInDjango51Warning +from django.utils.version import PY311 + +from .models import Car, Person, PossessedCar +from .views import empty_response + + +class SkippingTestCase(SimpleTestCase): + def _assert_skipping(self, func, expected_exc, msg=None): + try: + if msg is not None: + with self.assertRaisesMessage(expected_exc, 
msg): + func() + else: + with self.assertRaises(expected_exc): + func() + except unittest.SkipTest: + self.fail("%s should not result in a skipped test." % func.__name__) + + def test_skip_unless_db_feature(self): + """ + Testing the django.test.skipUnlessDBFeature decorator. + """ + + # Total hack, but it works, just want an attribute that's always true. + @skipUnlessDBFeature("__class__") + def test_func(): + raise ValueError + + @skipUnlessDBFeature("notprovided") + def test_func2(): + raise ValueError + + @skipUnlessDBFeature("__class__", "__class__") + def test_func3(): + raise ValueError + + @skipUnlessDBFeature("__class__", "notprovided") + def test_func4(): + raise ValueError + + self._assert_skipping(test_func, ValueError) + self._assert_skipping(test_func2, unittest.SkipTest) + self._assert_skipping(test_func3, ValueError) + self._assert_skipping(test_func4, unittest.SkipTest) + + class SkipTestCase(SimpleTestCase): + @skipUnlessDBFeature("missing") + def test_foo(self): + pass + + self._assert_skipping( + SkipTestCase("test_foo").test_foo, + ValueError, + "skipUnlessDBFeature cannot be used on test_foo (test_utils.tests." + "SkippingTestCase.test_skip_unless_db_feature..SkipTestCase%s) " + "as SkippingTestCase.test_skip_unless_db_feature..SkipTestCase " + "doesn't allow queries against the 'default' database." + # Python 3.11 uses fully qualified test name in the output. + % (".test_foo" if PY311 else ""), + ) + + def test_skip_if_db_feature(self): + """ + Testing the django.test.skipIfDBFeature decorator. 
+ """ + + @skipIfDBFeature("__class__") + def test_func(): + raise ValueError + + @skipIfDBFeature("notprovided") + def test_func2(): + raise ValueError + + @skipIfDBFeature("__class__", "__class__") + def test_func3(): + raise ValueError + + @skipIfDBFeature("__class__", "notprovided") + def test_func4(): + raise ValueError + + @skipIfDBFeature("notprovided", "notprovided") + def test_func5(): + raise ValueError + + self._assert_skipping(test_func, unittest.SkipTest) + self._assert_skipping(test_func2, ValueError) + self._assert_skipping(test_func3, unittest.SkipTest) + self._assert_skipping(test_func4, unittest.SkipTest) + self._assert_skipping(test_func5, ValueError) + + class SkipTestCase(SimpleTestCase): + @skipIfDBFeature("missing") + def test_foo(self): + pass + + self._assert_skipping( + SkipTestCase("test_foo").test_foo, + ValueError, + "skipIfDBFeature cannot be used on test_foo (test_utils.tests." + "SkippingTestCase.test_skip_if_db_feature..SkipTestCase%s) " + "as SkippingTestCase.test_skip_if_db_feature..SkipTestCase " + "doesn't allow queries against the 'default' database." + # Python 3.11 uses fully qualified test name in the output. 
+ % (".test_foo" if PY311 else ""), + ) + + +class SkippingClassTestCase(TestCase): + def test_skip_class_unless_db_feature(self): + @skipUnlessDBFeature("__class__") + class NotSkippedTests(TestCase): + def test_dummy(self): + return + + @skipUnlessDBFeature("missing") + @skipIfDBFeature("__class__") + class SkippedTests(TestCase): + def test_will_be_skipped(self): + self.fail("We should never arrive here.") + + @skipIfDBFeature("__dict__") + class SkippedTestsSubclass(SkippedTests): + pass + + test_suite = unittest.TestSuite() + test_suite.addTest(NotSkippedTests("test_dummy")) + try: + test_suite.addTest(SkippedTests("test_will_be_skipped")) + test_suite.addTest(SkippedTestsSubclass("test_will_be_skipped")) + except unittest.SkipTest: + self.fail("SkipTest should not be raised here.") + result = unittest.TextTestRunner(stream=StringIO()).run(test_suite) + self.assertEqual(result.testsRun, 3) + self.assertEqual(len(result.skipped), 2) + self.assertEqual(result.skipped[0][1], "Database has feature(s) __class__") + self.assertEqual(result.skipped[1][1], "Database has feature(s) __class__") + + def test_missing_default_databases(self): + @skipIfDBFeature("missing") + class MissingDatabases(SimpleTestCase): + def test_assertion_error(self): + pass + + suite = unittest.TestSuite() + try: + suite.addTest(MissingDatabases("test_assertion_error")) + except unittest.SkipTest: + self.fail("SkipTest should not be raised at this stage") + runner = unittest.TextTestRunner(stream=StringIO()) + msg = ( + "skipIfDBFeature cannot be used on ." + "MissingDatabases'> as it doesn't allow queries against the " + "'default' database." 
+ ) + with self.assertRaisesMessage(ValueError, msg): + runner.run(suite) + + +@override_settings(ROOT_URLCONF="test_utils.urls") +class AssertNumQueriesTests(TestCase): + def test_assert_num_queries(self): + def test_func(): + raise ValueError + + with self.assertRaises(ValueError): + self.assertNumQueries(2, test_func) + + def test_assert_num_queries_with_client(self): + person = Person.objects.create(name="test") + + self.assertNumQueries( + 1, self.client.get, "/test_utils/get_person/%s/" % person.pk + ) + + self.assertNumQueries( + 1, self.client.get, "/test_utils/get_person/%s/" % person.pk + ) + + def test_func(): + self.client.get("/test_utils/get_person/%s/" % person.pk) + self.client.get("/test_utils/get_person/%s/" % person.pk) + + self.assertNumQueries(2, test_func) + + +class AssertNumQueriesUponConnectionTests(TransactionTestCase): + available_apps = [] + + def test_ignores_connection_configuration_queries(self): + real_ensure_connection = connection.ensure_connection + connection.close() + + def make_configuration_query(): + is_opening_connection = connection.connection is None + real_ensure_connection() + + if is_opening_connection: + # Avoid infinite recursion. Creating a cursor calls + # ensure_connection() which is currently mocked by this method. + with connection.cursor() as cursor: + cursor.execute("SELECT 1" + connection.features.bare_select_suffix) + + ensure_connection = ( + "django.db.backends.base.base.BaseDatabaseWrapper.ensure_connection" + ) + with mock.patch(ensure_connection, side_effect=make_configuration_query): + with self.assertNumQueries(1): + list(Car.objects.all()) + + +class AssertQuerySetEqualTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.p1 = Person.objects.create(name="p1") + cls.p2 = Person.objects.create(name="p2") + + def test_rename_assertquerysetequal_deprecation_warning(self): + msg = "assertQuerysetEqual() is deprecated in favor of assertQuerySetEqual()." 
+ with self.assertRaisesMessage(RemovedInDjango51Warning, msg): + self.assertQuerysetEqual() + + @ignore_warnings(category=RemovedInDjango51Warning) + def test_deprecated_assertquerysetequal(self): + self.assertQuerysetEqual(Person.objects.filter(name="p3"), []) + + def test_empty(self): + self.assertQuerySetEqual(Person.objects.filter(name="p3"), []) + + def test_ordered(self): + self.assertQuerySetEqual( + Person.objects.order_by("name"), + [self.p1, self.p2], + ) + + def test_unordered(self): + self.assertQuerySetEqual( + Person.objects.order_by("name"), [self.p2, self.p1], ordered=False + ) + + def test_queryset(self): + self.assertQuerySetEqual( + Person.objects.order_by("name"), + Person.objects.order_by("name"), + ) + + def test_flat_values_list(self): + self.assertQuerySetEqual( + Person.objects.order_by("name").values_list("name", flat=True), + ["p1", "p2"], + ) + + def test_transform(self): + self.assertQuerySetEqual( + Person.objects.order_by("name"), + [self.p1.pk, self.p2.pk], + transform=lambda x: x.pk, + ) + + def test_repr_transform(self): + self.assertQuerySetEqual( + Person.objects.order_by("name"), + [repr(self.p1), repr(self.p2)], + transform=repr, + ) + + def test_undefined_order(self): + # Using an unordered queryset with more than one ordered value + # is an error. + msg = ( + "Trying to compare non-ordered queryset against more than one " + "ordered value." + ) + with self.assertRaisesMessage(ValueError, msg): + self.assertQuerySetEqual( + Person.objects.all(), + [self.p1, self.p2], + ) + # No error for one value. + self.assertQuerySetEqual(Person.objects.filter(name="p1"), [self.p1]) + + def test_repeated_values(self): + """ + assertQuerySetEqual checks the number of appearance of each item + when used with option ordered=False. 
+ """ + batmobile = Car.objects.create(name="Batmobile") + k2000 = Car.objects.create(name="K 2000") + PossessedCar.objects.bulk_create( + [ + PossessedCar(car=batmobile, belongs_to=self.p1), + PossessedCar(car=batmobile, belongs_to=self.p1), + PossessedCar(car=k2000, belongs_to=self.p1), + PossessedCar(car=k2000, belongs_to=self.p1), + PossessedCar(car=k2000, belongs_to=self.p1), + PossessedCar(car=k2000, belongs_to=self.p1), + ] + ) + with self.assertRaises(AssertionError): + self.assertQuerySetEqual( + self.p1.cars.all(), [batmobile, k2000], ordered=False + ) + self.assertQuerySetEqual( + self.p1.cars.all(), [batmobile] * 2 + [k2000] * 4, ordered=False + ) + + def test_maxdiff(self): + names = ["Joe Smith %s" % i for i in range(20)] + Person.objects.bulk_create([Person(name=name) for name in names]) + names.append("Extra Person") + + with self.assertRaises(AssertionError) as ctx: + self.assertQuerySetEqual( + Person.objects.filter(name__startswith="Joe"), + names, + ordered=False, + transform=lambda p: p.name, + ) + self.assertIn("Set self.maxDiff to None to see it.", str(ctx.exception)) + + original = self.maxDiff + self.maxDiff = None + try: + with self.assertRaises(AssertionError) as ctx: + self.assertQuerySetEqual( + Person.objects.filter(name__startswith="Joe"), + names, + ordered=False, + transform=lambda p: p.name, + ) + finally: + self.maxDiff = original + exception_msg = str(ctx.exception) + self.assertNotIn("Set self.maxDiff to None to see it.", exception_msg) + for name in names: + self.assertIn(name, exception_msg) + + +@override_settings(ROOT_URLCONF="test_utils.urls") +class CaptureQueriesContextManagerTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.person_pk = str(Person.objects.create(name="test").pk) + + def test_simple(self): + with CaptureQueriesContext(connection) as captured_queries: + Person.objects.get(pk=self.person_pk) + self.assertEqual(len(captured_queries), 1) + self.assertIn(self.person_pk, captured_queries[0]["sql"]) 
+ + with CaptureQueriesContext(connection) as captured_queries: + pass + self.assertEqual(0, len(captured_queries)) + + def test_within(self): + with CaptureQueriesContext(connection) as captured_queries: + Person.objects.get(pk=self.person_pk) + self.assertEqual(len(captured_queries), 1) + self.assertIn(self.person_pk, captured_queries[0]["sql"]) + + def test_nested(self): + with CaptureQueriesContext(connection) as captured_queries: + Person.objects.count() + with CaptureQueriesContext(connection) as nested_captured_queries: + Person.objects.count() + self.assertEqual(1, len(nested_captured_queries)) + self.assertEqual(2, len(captured_queries)) + + def test_failure(self): + with self.assertRaises(TypeError): + with CaptureQueriesContext(connection): + raise TypeError + + def test_with_client(self): + with CaptureQueriesContext(connection) as captured_queries: + self.client.get("/test_utils/get_person/%s/" % self.person_pk) + self.assertEqual(len(captured_queries), 1) + self.assertIn(self.person_pk, captured_queries[0]["sql"]) + + with CaptureQueriesContext(connection) as captured_queries: + self.client.get("/test_utils/get_person/%s/" % self.person_pk) + self.assertEqual(len(captured_queries), 1) + self.assertIn(self.person_pk, captured_queries[0]["sql"]) + + with CaptureQueriesContext(connection) as captured_queries: + self.client.get("/test_utils/get_person/%s/" % self.person_pk) + self.client.get("/test_utils/get_person/%s/" % self.person_pk) + self.assertEqual(len(captured_queries), 2) + self.assertIn(self.person_pk, captured_queries[0]["sql"]) + self.assertIn(self.person_pk, captured_queries[1]["sql"]) + + +@override_settings(ROOT_URLCONF="test_utils.urls") +class AssertNumQueriesContextManagerTests(TestCase): + def test_simple(self): + with self.assertNumQueries(0): + pass + + with self.assertNumQueries(1): + Person.objects.count() + + with self.assertNumQueries(2): + Person.objects.count() + Person.objects.count() + + def test_failure(self): + msg = "1 != 
2 : 1 queries executed, 2 expected\nCaptured queries were:\n1." + with self.assertRaisesMessage(AssertionError, msg): + with self.assertNumQueries(2): + Person.objects.count() + + with self.assertRaises(TypeError): + with self.assertNumQueries(4000): + raise TypeError + + def test_with_client(self): + person = Person.objects.create(name="test") + + with self.assertNumQueries(1): + self.client.get("/test_utils/get_person/%s/" % person.pk) + + with self.assertNumQueries(1): + self.client.get("/test_utils/get_person/%s/" % person.pk) + + with self.assertNumQueries(2): + self.client.get("/test_utils/get_person/%s/" % person.pk) + self.client.get("/test_utils/get_person/%s/" % person.pk) + + +@override_settings(ROOT_URLCONF="test_utils.urls") +class AssertTemplateUsedContextManagerTests(SimpleTestCase): + def test_usage(self): + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/base.html") + + with self.assertTemplateUsed(template_name="template_used/base.html"): + render_to_string("template_used/base.html") + + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/include.html") + + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/extends.html") + + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/base.html") + render_to_string("template_used/base.html") + + def test_nested_usage(self): + with self.assertTemplateUsed("template_used/base.html"): + with self.assertTemplateUsed("template_used/include.html"): + render_to_string("template_used/include.html") + + with self.assertTemplateUsed("template_used/extends.html"): + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/extends.html") + + with self.assertTemplateUsed("template_used/base.html"): + with self.assertTemplateUsed("template_used/alternative.html"): + render_to_string("template_used/alternative.html") + 
render_to_string("template_used/base.html") + + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/extends.html") + with self.assertTemplateNotUsed("template_used/base.html"): + render_to_string("template_used/alternative.html") + render_to_string("template_used/base.html") + + def test_not_used(self): + with self.assertTemplateNotUsed("template_used/base.html"): + pass + with self.assertTemplateNotUsed("template_used/alternative.html"): + pass + + def test_error_message(self): + msg = "No templates used to render the response" + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed("template_used/base.html"): + pass + + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed(template_name="template_used/base.html"): + pass + + msg2 = ( + "Template 'template_used/base.html' was not a template used to render " + "the response. Actual template(s) used: template_used/alternative.html" + ) + with self.assertRaisesMessage(AssertionError, msg2): + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/alternative.html") + + with self.assertRaisesMessage( + AssertionError, "No templates used to render the response" + ): + response = self.client.get("/test_utils/no_template_used/") + self.assertTemplateUsed(response, "template_used/base.html") + + def test_msg_prefix(self): + msg_prefix = "Prefix" + msg = f"{msg_prefix}: No templates used to render the response" + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed( + "template_used/base.html", msg_prefix=msg_prefix + ): + pass + + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed( + template_name="template_used/base.html", + msg_prefix=msg_prefix, + ): + pass + + msg = ( + f"{msg_prefix}: Template 'template_used/base.html' was not a " + f"template used to render the response. 
Actual template(s) used: " + f"template_used/alternative.html" + ) + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed( + "template_used/base.html", msg_prefix=msg_prefix + ): + render_to_string("template_used/alternative.html") + + def test_count(self): + with self.assertTemplateUsed("template_used/base.html", count=2): + render_to_string("template_used/base.html") + render_to_string("template_used/base.html") + + msg = ( + "Template 'template_used/base.html' was expected to be rendered " + "3 time(s) but was actually rendered 2 time(s)." + ) + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed("template_used/base.html", count=3): + render_to_string("template_used/base.html") + render_to_string("template_used/base.html") + + def test_failure(self): + msg = "response and/or template_name argument must be provided" + with self.assertRaisesMessage(TypeError, msg): + with self.assertTemplateUsed(): + pass + + msg = "No templates used to render the response" + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed(""): + pass + + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed(""): + render_to_string("template_used/base.html") + + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed(template_name=""): + pass + + msg = ( + "Template 'template_used/base.html' was not a template used to " + "render the response. Actual template(s) used: " + "template_used/alternative.html" + ) + with self.assertRaisesMessage(AssertionError, msg): + with self.assertTemplateUsed("template_used/base.html"): + render_to_string("template_used/alternative.html") + + def test_assert_used_on_http_response(self): + response = HttpResponse() + msg = "%s() is only usable on responses fetched using the Django test Client." 
+ with self.assertRaisesMessage(ValueError, msg % "assertTemplateUsed"): + self.assertTemplateUsed(response, "template.html") + with self.assertRaisesMessage(ValueError, msg % "assertTemplateNotUsed"): + self.assertTemplateNotUsed(response, "template.html") + + +class HTMLEqualTests(SimpleTestCase): + def test_html_parser(self): + element = parse_html("

    Hello

    ") + self.assertEqual(len(element.children), 1) + self.assertEqual(element.children[0].name, "p") + self.assertEqual(element.children[0].children[0], "Hello") + + parse_html("

    ") + parse_html("

    ") + dom = parse_html("

    foo") + self.assertEqual(len(dom.children), 1) + self.assertEqual(dom.name, "p") + self.assertEqual(dom[0], "foo") + + def test_parse_html_in_script(self): + parse_html('') + parse_html( + """ + + """ + ) + + # script content will be parsed to text + dom = parse_html( + """ + + """ + ) + self.assertEqual(len(dom.children), 1) + self.assertEqual(dom.children[0], "

    foo

    '' bar") + + def test_self_closing_tags(self): + self_closing_tags = [ + "area", + "base", + "br", + "col", + "embed", + "hr", + "img", + "input", + "link", + "meta", + "param", + "source", + "track", + "wbr", + # Deprecated tags + "frame", + "spacer", + ] + for tag in self_closing_tags: + with self.subTest(tag): + dom = parse_html("

    Hello <%s> world

    " % tag) + self.assertEqual(len(dom.children), 3) + self.assertEqual(dom[0], "Hello") + self.assertEqual(dom[1].name, tag) + self.assertEqual(dom[2], "world") + + dom = parse_html("

    Hello <%s /> world

    " % tag) + self.assertEqual(len(dom.children), 3) + self.assertEqual(dom[0], "Hello") + self.assertEqual(dom[1].name, tag) + self.assertEqual(dom[2], "world") + + def test_simple_equal_html(self): + self.assertHTMLEqual("", "") + self.assertHTMLEqual("

    ", "

    ") + self.assertHTMLEqual("

    ", "

    ") + self.assertHTMLEqual("

    Hello

    ", "

    Hello

    ") + self.assertHTMLEqual("

    Hello

    ", "

    Hello

    ") + self.assertHTMLEqual("
    \n

    Hello

    ", "

    Hello

    \n") + self.assertHTMLEqual( + "

    Hello\nWorld !

    ", "

    Hello World\n!

    " + ) + self.assertHTMLEqual( + "

    Hello\nWorld !

    ", "

    Hello World\n!

    " + ) + self.assertHTMLEqual("

    Hello World !

    ", "

    Hello World\n\n!

    ") + self.assertHTMLEqual("

    ", "

    ") + self.assertHTMLEqual("

    ", "

    ") + self.assertHTMLEqual("

    ", "

    ") + self.assertHTMLEqual("", '') + self.assertHTMLEqual("

    Hello", "

    Hello") + self.assertHTMLEqual("

    Hello

    World", "

    Hello

    World") + + def test_ignore_comments(self): + self.assertHTMLEqual( + "
    Hello World!
    ", + "
    Hello World!
    ", + ) + + def test_unequal_html(self): + self.assertHTMLNotEqual("

    Hello

    ", "

    Hello!

    ") + self.assertHTMLNotEqual("

    foobar

    ", "

    foo bar

    ") + self.assertHTMLNotEqual("

    foo bar

    ", "

    foo  bar

    ") + self.assertHTMLNotEqual("

    foo nbsp

    ", "

    foo  

    ") + self.assertHTMLNotEqual("

    foo #20

    ", "

    foo 

    ") + self.assertHTMLNotEqual( + "

    HelloWorld

    ", + "

    HelloWorld

    ", + ) + self.assertHTMLNotEqual( + "

    HelloWorld

    ", + "

    HelloWorld

    ", + ) + + def test_attributes(self): + self.assertHTMLEqual( + '', '' + ) + self.assertHTMLEqual( + """""", + '', + ) + self.assertHTMLNotEqual( + '', + '', + ) + + def test_class_attribute(self): + pairs = [ + ('

    ', '

    '), + ('

    ', '

    '), + ('

    ', '

    '), + ('

    ', '

    '), + ('

    ', '

    '), + ('

    ', '

    '), + ('

    ', '

    '), + ] + for html1, html2 in pairs: + with self.subTest(html1): + self.assertHTMLEqual(html1, html2) + + def test_boolean_attribute(self): + html1 = "" + html2 = '' + html3 = '' + self.assertHTMLEqual(html1, html2) + self.assertHTMLEqual(html1, html3) + self.assertHTMLEqual(html2, html3) + self.assertHTMLNotEqual(html1, '') + self.assertEqual(str(parse_html(html1)), "") + self.assertEqual(str(parse_html(html2)), "") + self.assertEqual(str(parse_html(html3)), "") + + def test_non_boolean_attibutes(self): + html1 = "" + html2 = '' + html3 = '' + self.assertHTMLEqual(html1, html2) + self.assertHTMLNotEqual(html1, html3) + self.assertEqual(str(parse_html(html1)), '') + self.assertEqual(str(parse_html(html2)), '') + + def test_normalize_refs(self): + pairs = [ + ("'", "'"), + ("'", "'"), + ("'", "'"), + ("'", "'"), + ("'", "'"), + ("'", "'"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ("&", "&"), + ] + for pair in pairs: + with self.subTest(repr(pair)): + self.assertHTMLEqual(*pair) + + def test_complex_examples(self): + self.assertHTMLEqual( + """ + + + + +""", # NOQA + """ + + + + + + + + + + + + + """, + ) + + self.assertHTMLEqual( + """ + + + + Document + + + +

    + This is a valid paragraph +

    this is a div AFTER the p
    + + """, + """ + + + + Document + + + +

    This is a valid paragraph + +

    this is a div AFTER the p
    +

    + + """, + ) + + def test_html_contain(self): + # equal html contains each other + dom1 = parse_html("

    foo") + dom2 = parse_html("

    foo

    ") + self.assertIn(dom1, dom2) + self.assertIn(dom2, dom1) + + dom2 = parse_html("

    foo

    ") + self.assertIn(dom1, dom2) + self.assertNotIn(dom2, dom1) + + self.assertNotIn("

    foo

    ", dom2) + self.assertIn("foo", dom2) + + # when a root element is used ... + dom1 = parse_html("

    foo

    bar

    ") + dom2 = parse_html("

    foo

    bar

    ") + self.assertIn(dom1, dom2) + dom1 = parse_html("

    foo

    ") + self.assertIn(dom1, dom2) + dom1 = parse_html("

    bar

    ") + self.assertIn(dom1, dom2) + dom1 = parse_html("

    foo

    bar

    ") + self.assertIn(dom2, dom1) + + def test_count(self): + # equal html contains each other one time + dom1 = parse_html("

    foo") + dom2 = parse_html("

    foo

    ") + self.assertEqual(dom1.count(dom2), 1) + self.assertEqual(dom2.count(dom1), 1) + + dom2 = parse_html("

    foo

    bar

    ") + self.assertEqual(dom2.count(dom1), 1) + + dom2 = parse_html("

    foo foo

    foo

    ") + self.assertEqual(dom2.count("foo"), 3) + + dom2 = parse_html('

    foo

    ') + self.assertEqual(dom2.count("bar"), 0) + self.assertEqual(dom2.count("class"), 0) + self.assertEqual(dom2.count("p"), 0) + self.assertEqual(dom2.count("o"), 2) + + dom2 = parse_html("

    foo

    foo

    ") + self.assertEqual(dom2.count(dom1), 2) + + dom2 = parse_html('

    foo

    foo

    ') + self.assertEqual(dom2.count(dom1), 1) + + dom2 = parse_html("

    foo

    ") + self.assertEqual(dom2.count(dom1), 1) + + dom2 = parse_html("

    foo

    foo

    ") + self.assertEqual(dom2.count(dom1), 1) + + dom2 = parse_html("

    foo

    bar

    ") + self.assertEqual(dom2.count(dom1), 0) + + # HTML with a root element contains the same HTML with no root element. + dom1 = parse_html("

    foo

    bar

    ") + dom2 = parse_html("

    foo

    bar

    ") + self.assertEqual(dom2.count(dom1), 1) + + # Target of search is a sequence of child elements and appears more + # than once. + dom2 = parse_html("

    foo

    bar

    foo

    bar

    ") + self.assertEqual(dom2.count(dom1), 2) + + # Searched HTML has additional children. + dom1 = parse_html("
    ") + dom2 = parse_html("") + self.assertEqual(dom2.count(dom1), 1) + + # No match found in children. + dom1 = parse_html("") + self.assertEqual(dom2.count(dom1), 0) + + # Target of search found among children and grandchildren. + dom1 = parse_html("") + dom2 = parse_html("") + self.assertEqual(dom2.count(dom1), 2) + + def test_root_element_escaped_html(self): + html = "<br>" + parsed = parse_html(html) + self.assertEqual(str(parsed), html) + + def test_parsing_errors(self): + with self.assertRaises(AssertionError): + self.assertHTMLEqual("

    ", "") + with self.assertRaises(AssertionError): + self.assertHTMLEqual("", "

    ") + error_msg = ( + "First argument is not valid HTML:\n" + "('Unexpected end tag `div` (Line 1, Column 6)', (1, 6))" + ) + with self.assertRaisesMessage(AssertionError, error_msg): + self.assertHTMLEqual("< div>", "

    ") + with self.assertRaises(HTMLParseError): + parse_html("

    ") + + def test_escaped_html_errors(self): + msg = "

    \n\n

    !=

    \n<foo>\n

    \n" + with self.assertRaisesMessage(AssertionError, msg): + self.assertHTMLEqual("

    ", "

    <foo>

    ") + with self.assertRaisesMessage(AssertionError, msg): + self.assertHTMLEqual("

    ", "

    <foo>

    ") + + def test_contains_html(self): + response = HttpResponse( + """ + This is a form:
    + +
    """ + ) + + self.assertNotContains(response, "") + self.assertContains(response, '
    ') + + self.assertContains(response, "", html=True) + self.assertNotContains(response, '', html=True) + + invalid_response = HttpResponse(""">""") + + with self.assertRaises(AssertionError): + self.assertContains(invalid_response, "

    ") + + with self.assertRaises(AssertionError): + self.assertContains(response, '

    ') + + def test_unicode_handling(self): + response = HttpResponse( + '

    Some help text for the title (with Unicode ŠĐĆŽćžšđ)

    ' + ) + self.assertContains( + response, + '

    Some help text for the title (with Unicode ŠĐĆŽćžšđ)

    ', + html=True, + ) + + +class JSONEqualTests(SimpleTestCase): + def test_simple_equal(self): + json1 = '{"attr1": "foo", "attr2":"baz"}' + json2 = '{"attr1": "foo", "attr2":"baz"}' + self.assertJSONEqual(json1, json2) + + def test_simple_equal_unordered(self): + json1 = '{"attr1": "foo", "attr2":"baz"}' + json2 = '{"attr2":"baz", "attr1": "foo"}' + self.assertJSONEqual(json1, json2) + + def test_simple_equal_raise(self): + json1 = '{"attr1": "foo", "attr2":"baz"}' + json2 = '{"attr2":"baz"}' + with self.assertRaises(AssertionError): + self.assertJSONEqual(json1, json2) + + def test_equal_parsing_errors(self): + invalid_json = '{"attr1": "foo, "attr2":"baz"}' + valid_json = '{"attr1": "foo", "attr2":"baz"}' + with self.assertRaises(AssertionError): + self.assertJSONEqual(invalid_json, valid_json) + with self.assertRaises(AssertionError): + self.assertJSONEqual(valid_json, invalid_json) + + def test_simple_not_equal(self): + json1 = '{"attr1": "foo", "attr2":"baz"}' + json2 = '{"attr2":"baz"}' + self.assertJSONNotEqual(json1, json2) + + def test_simple_not_equal_raise(self): + json1 = '{"attr1": "foo", "attr2":"baz"}' + json2 = '{"attr1": "foo", "attr2":"baz"}' + with self.assertRaises(AssertionError): + self.assertJSONNotEqual(json1, json2) + + def test_not_equal_parsing_errors(self): + invalid_json = '{"attr1": "foo, "attr2":"baz"}' + valid_json = '{"attr1": "foo", "attr2":"baz"}' + with self.assertRaises(AssertionError): + self.assertJSONNotEqual(invalid_json, valid_json) + with self.assertRaises(AssertionError): + self.assertJSONNotEqual(valid_json, invalid_json) + + +class XMLEqualTests(SimpleTestCase): + def test_simple_equal(self): + xml1 = "" + xml2 = "" + self.assertXMLEqual(xml1, xml2) + + def test_simple_equal_unordered(self): + xml1 = "" + xml2 = "" + self.assertXMLEqual(xml1, xml2) + + def test_simple_equal_raise(self): + xml1 = "" + xml2 = "" + with self.assertRaises(AssertionError): + self.assertXMLEqual(xml1, xml2) + + def 
test_simple_equal_raises_message(self): + xml1 = "" + xml2 = "" + + msg = """{xml1} != {xml2} +- ++ +? ++++++++++ +""".format( + xml1=repr(xml1), xml2=repr(xml2) + ) + + with self.assertRaisesMessage(AssertionError, msg): + self.assertXMLEqual(xml1, xml2) + + def test_simple_not_equal(self): + xml1 = "" + xml2 = "" + self.assertXMLNotEqual(xml1, xml2) + + def test_simple_not_equal_raise(self): + xml1 = "" + xml2 = "" + with self.assertRaises(AssertionError): + self.assertXMLNotEqual(xml1, xml2) + + def test_parsing_errors(self): + xml_unvalid = "" + xml2 = "" + with self.assertRaises(AssertionError): + self.assertXMLNotEqual(xml_unvalid, xml2) + + def test_comment_root(self): + xml1 = "" + xml2 = "" + self.assertXMLEqual(xml1, xml2) + + def test_simple_equal_with_leading_or_trailing_whitespace(self): + xml1 = "foo \t\n" + xml2 = " \t\nfoo" + self.assertXMLEqual(xml1, xml2) + + def test_simple_not_equal_with_whitespace_in_the_middle(self): + xml1 = "foobar" + xml2 = "foo bar" + self.assertXMLNotEqual(xml1, xml2) + + def test_doctype_root(self): + xml1 = '' + xml2 = '' + self.assertXMLEqual(xml1, xml2) + + def test_processing_instruction(self): + xml1 = ( + '' + '' + ) + xml2 = ( + '' + '' + ) + self.assertXMLEqual(xml1, xml2) + self.assertXMLEqual( + '', + '', + ) + + +class SkippingExtraTests(TestCase): + fixtures = ["should_not_be_loaded.json"] + + # HACK: This depends on internals of our TestCase subclasses + def __call__(self, result=None): + # Detect fixture loading by counting SQL queries, should be zero + with self.assertNumQueries(0): + super().__call__(result) + + @unittest.skip("Fixture loading should not be performed for skipped tests.") + def test_fixtures_are_skipped(self): + pass + + +class AssertRaisesMsgTest(SimpleTestCase): + def test_assert_raises_message(self): + msg = "'Expected message' not found in 'Unexpected message'" + # context manager form of assertRaisesMessage() + with self.assertRaisesMessage(AssertionError, msg): + with 
self.assertRaisesMessage(ValueError, "Expected message"): + raise ValueError("Unexpected message") + + # callable form + def func(): + raise ValueError("Unexpected message") + + with self.assertRaisesMessage(AssertionError, msg): + self.assertRaisesMessage(ValueError, "Expected message", func) + + def test_special_re_chars(self): + """assertRaisesMessage shouldn't interpret RE special chars.""" + + def func1(): + raise ValueError("[.*x+]y?") + + with self.assertRaisesMessage(ValueError, "[.*x+]y?"): + func1() + + +class AssertWarnsMessageTests(SimpleTestCase): + def test_context_manager(self): + with self.assertWarnsMessage(UserWarning, "Expected message"): + warnings.warn("Expected message", UserWarning) + + def test_context_manager_failure(self): + msg = "Expected message' not found in 'Unexpected message'" + with self.assertRaisesMessage(AssertionError, msg): + with self.assertWarnsMessage(UserWarning, "Expected message"): + warnings.warn("Unexpected message", UserWarning) + + def test_callable(self): + def func(): + warnings.warn("Expected message", UserWarning) + + self.assertWarnsMessage(UserWarning, "Expected message", func) + + def test_special_re_chars(self): + def func1(): + warnings.warn("[.*x+]y?", UserWarning) + + with self.assertWarnsMessage(UserWarning, "[.*x+]y?"): + func1() + + +class AssertFieldOutputTests(SimpleTestCase): + def test_assert_field_output(self): + error_invalid = ["Enter a valid email address."] + self.assertFieldOutput( + EmailField, {"a@a.com": "a@a.com"}, {"aaa": error_invalid} + ) + with self.assertRaises(AssertionError): + self.assertFieldOutput( + EmailField, + {"a@a.com": "a@a.com"}, + {"aaa": error_invalid + ["Another error"]}, + ) + with self.assertRaises(AssertionError): + self.assertFieldOutput( + EmailField, {"a@a.com": "Wrong output"}, {"aaa": error_invalid} + ) + with self.assertRaises(AssertionError): + self.assertFieldOutput( + EmailField, + {"a@a.com": "a@a.com"}, + {"aaa": ["Come on, gimme some well formatted data, 
dude."]}, + ) + + def test_custom_required_message(self): + class MyCustomField(IntegerField): + default_error_messages = { + "required": "This is really required.", + } + + self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None) + + +@override_settings(ROOT_URLCONF="test_utils.urls") +class AssertURLEqualTests(SimpleTestCase): + def test_equal(self): + valid_tests = ( + ("http://example.com/?", "http://example.com/"), + ("http://example.com/?x=1&", "http://example.com/?x=1"), + ("http://example.com/?x=1&y=2", "http://example.com/?y=2&x=1"), + ("http://example.com/?x=1&y=2", "http://example.com/?y=2&x=1"), + ( + "http://example.com/?x=1&y=2&a=1&a=2", + "http://example.com/?a=1&a=2&y=2&x=1", + ), + ("/path/to/?x=1&y=2&z=3", "/path/to/?z=3&y=2&x=1"), + ("?x=1&y=2&z=3", "?z=3&y=2&x=1"), + ("/test_utils/no_template_used/", reverse_lazy("no_template_used")), + ) + for url1, url2 in valid_tests: + with self.subTest(url=url1): + self.assertURLEqual(url1, url2) + + def test_not_equal(self): + invalid_tests = ( + # Protocol must be the same. + ("http://example.com/", "https://example.com/"), + ("http://example.com/?x=1&x=2", "https://example.com/?x=2&x=1"), + ("http://example.com/?x=1&y=bar&x=2", "https://example.com/?y=bar&x=2&x=1"), + # Parameters of the same name must be in the same order. 
+ ("/path/to?a=1&a=2", "/path/to/?a=2&a=1"), + ) + for url1, url2 in invalid_tests: + with self.subTest(url=url1), self.assertRaises(AssertionError): + self.assertURLEqual(url1, url2) + + def test_message(self): + msg = ( + "Expected 'http://example.com/?x=1&x=2' to equal " + "'https://example.com/?x=2&x=1'" + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertURLEqual( + "http://example.com/?x=1&x=2", "https://example.com/?x=2&x=1" + ) + + def test_msg_prefix(self): + msg = ( + "Prefix: Expected 'http://example.com/?x=1&x=2' to equal " + "'https://example.com/?x=2&x=1'" + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertURLEqual( + "http://example.com/?x=1&x=2", + "https://example.com/?x=2&x=1", + msg_prefix="Prefix: ", + ) + + +class TestForm(Form): + field = CharField() + + def clean_field(self): + value = self.cleaned_data.get("field", "") + if value == "invalid": + raise ValidationError("invalid value") + return value + + def clean(self): + if self.cleaned_data.get("field") == "invalid_non_field": + raise ValidationError("non-field error") + return self.cleaned_data + + @classmethod + def _get_cleaned_form(cls, field_value): + form = cls({"field": field_value}) + form.full_clean() + return form + + @classmethod + def valid(cls): + return cls._get_cleaned_form("valid") + + @classmethod + def invalid(cls, nonfield=False): + return cls._get_cleaned_form("invalid_non_field" if nonfield else "invalid") + + +class TestFormset(formset_factory(TestForm)): + @classmethod + def _get_cleaned_formset(cls, field_value): + formset = cls( + { + "form-TOTAL_FORMS": "1", + "form-INITIAL_FORMS": "0", + "form-0-field": field_value, + } + ) + formset.full_clean() + return formset + + @classmethod + def valid(cls): + return cls._get_cleaned_formset("valid") + + @classmethod + def invalid(cls, nonfield=False, nonform=False): + if nonform: + formset = cls({}, error_messages={"missing_management_form": "error"}) + formset.full_clean() + return 
formset + return cls._get_cleaned_formset("invalid_non_field" if nonfield else "invalid") + + +class AssertFormErrorTests(SimpleTestCase): + def test_single_error(self): + self.assertFormError(TestForm.invalid(), "field", "invalid value") + + def test_error_list(self): + self.assertFormError(TestForm.invalid(), "field", ["invalid value"]) + + def test_empty_errors_valid_form(self): + self.assertFormError(TestForm.valid(), "field", []) + + def test_empty_errors_valid_form_non_field_errors(self): + self.assertFormError(TestForm.valid(), None, []) + + def test_field_not_in_form(self): + msg = ( + "The form does not " + "contain the field 'other_field'." + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertFormError(TestForm.invalid(), "other_field", "invalid value") + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormError( + TestForm.invalid(), + "other_field", + "invalid value", + msg_prefix=msg_prefix, + ) + + def test_field_with_no_errors(self): + msg = ( + "The errors of field 'field' on form don't match." + ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormError(TestForm.valid(), "field", "invalid value") + self.assertIn("[] != ['invalid value']", str(ctx.exception)) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormError( + TestForm.valid(), "field", "invalid value", msg_prefix=msg_prefix + ) + + def test_field_with_different_error(self): + msg = ( + "The errors of field 'field' on form don't match." 
+ ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormError(TestForm.invalid(), "field", "other error") + self.assertIn("['invalid value'] != ['other error']", str(ctx.exception)) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormError( + TestForm.invalid(), "field", "other error", msg_prefix=msg_prefix + ) + + def test_unbound_form(self): + msg = ( + "The form is not " + "bound, it will never have any errors." + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertFormError(TestForm(), "field", []) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormError(TestForm(), "field", [], msg_prefix=msg_prefix) + + def test_empty_errors_invalid_form(self): + msg = ( + "The errors of field 'field' on form don't match." + ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormError(TestForm.invalid(), "field", []) + self.assertIn("['invalid value'] != []", str(ctx.exception)) + + def test_non_field_errors(self): + self.assertFormError(TestForm.invalid(nonfield=True), None, "non-field error") + + def test_different_non_field_errors(self): + msg = ( + "The non-field errors of form don't match." + ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormError( + TestForm.invalid(nonfield=True), None, "other non-field error" + ) + self.assertIn( + "['non-field error'] != ['other non-field error']", str(ctx.exception) + ) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormError( + TestForm.invalid(nonfield=True), + None, + "other non-field error", + msg_prefix=msg_prefix, + ) + + +class AssertFormSetErrorTests(SimpleTestCase): + def test_rename_assertformseterror_deprecation_warning(self): + msg = "assertFormsetError() is deprecated in favor of assertFormSetError()." 
+ with self.assertRaisesMessage(RemovedInDjango51Warning, msg): + self.assertFormsetError() + + @ignore_warnings(category=RemovedInDjango51Warning) + def test_deprecated_assertformseterror(self): + self.assertFormsetError(TestFormset.invalid(), 0, "field", "invalid value") + + def test_single_error(self): + self.assertFormSetError(TestFormset.invalid(), 0, "field", "invalid value") + + def test_error_list(self): + self.assertFormSetError(TestFormset.invalid(), 0, "field", ["invalid value"]) + + def test_empty_errors_valid_formset(self): + self.assertFormSetError(TestFormset.valid(), 0, "field", []) + + def test_multiple_forms(self): + formset = TestFormset( + { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-0-field": "valid", + "form-1-field": "invalid", + } + ) + formset.full_clean() + self.assertFormSetError(formset, 0, "field", []) + self.assertFormSetError(formset, 1, "field", ["invalid value"]) + + def test_field_not_in_form(self): + msg = ( + "The form 0 of formset " + "does not contain the field 'other_field'." + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertFormSetError( + TestFormset.invalid(), 0, "other_field", "invalid value" + ) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormSetError( + TestFormset.invalid(), + 0, + "other_field", + "invalid value", + msg_prefix=msg_prefix, + ) + + def test_field_with_no_errors(self): + msg = ( + "The errors of field 'field' on form 0 of formset don't match." 
+ ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormSetError(TestFormset.valid(), 0, "field", "invalid value") + self.assertIn("[] != ['invalid value']", str(ctx.exception)) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormSetError( + TestFormset.valid(), 0, "field", "invalid value", msg_prefix=msg_prefix + ) + + def test_field_with_different_error(self): + msg = ( + "The errors of field 'field' on form 0 of formset don't match." + ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormSetError(TestFormset.invalid(), 0, "field", "other error") + self.assertIn("['invalid value'] != ['other error']", str(ctx.exception)) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormSetError( + TestFormset.invalid(), 0, "field", "other error", msg_prefix=msg_prefix + ) + + def test_unbound_formset(self): + msg = ( + "The formset is not " + "bound, it will never have any errors." + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertFormSetError(TestFormset(), 0, "field", []) + + def test_empty_errors_invalid_formset(self): + msg = ( + "The errors of field 'field' on form 0 of formset don't match." + ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormSetError(TestFormset.invalid(), 0, "field", []) + self.assertIn("['invalid value'] != []", str(ctx.exception)) + + def test_non_field_errors(self): + self.assertFormSetError( + TestFormset.invalid(nonfield=True), 0, None, "non-field error" + ) + + def test_different_non_field_errors(self): + msg = ( + "The non-field errors of form 0 of formset don't match." 
+ ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormSetError( + TestFormset.invalid(nonfield=True), 0, None, "other non-field error" + ) + self.assertIn( + "['non-field error'] != ['other non-field error']", str(ctx.exception) + ) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormSetError( + TestFormset.invalid(nonfield=True), + 0, + None, + "other non-field error", + msg_prefix=msg_prefix, + ) + + def test_no_non_field_errors(self): + msg = ( + "The non-field errors of form 0 of formset don't match." + ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormSetError(TestFormset.invalid(), 0, None, "non-field error") + self.assertIn("[] != ['non-field error']", str(ctx.exception)) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormSetError( + TestFormset.invalid(), 0, None, "non-field error", msg_prefix=msg_prefix + ) + + def test_non_form_errors(self): + self.assertFormSetError(TestFormset.invalid(nonform=True), None, None, "error") + + def test_different_non_form_errors(self): + msg = ( + "The non-form errors of formset don't match." + ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormSetError( + TestFormset.invalid(nonform=True), None, None, "other error" + ) + self.assertIn("['error'] != ['other error']", str(ctx.exception)) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormSetError( + TestFormset.invalid(nonform=True), + None, + None, + "other error", + msg_prefix=msg_prefix, + ) + + def test_no_non_form_errors(self): + msg = ( + "The non-form errors of formset don't match." 
+ ) + with self.assertRaisesMessage(AssertionError, msg) as ctx: + self.assertFormSetError(TestFormset.invalid(), None, None, "error") + self.assertIn("[] != ['error']", str(ctx.exception)) + msg_prefix = "Custom prefix" + with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"): + self.assertFormSetError( + TestFormset.invalid(), + None, + None, + "error", + msg_prefix=msg_prefix, + ) + + def test_non_form_errors_with_field(self): + msg = "You must use field=None with form_index=None." + with self.assertRaisesMessage(ValueError, msg): + self.assertFormSetError( + TestFormset.invalid(nonform=True), None, "field", "error" + ) + + def test_form_index_too_big(self): + msg = ( + "The formset only has " + "1 form." + ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertFormSetError(TestFormset.invalid(), 2, "field", "error") + + def test_form_index_too_big_plural(self): + formset = TestFormset( + { + "form-TOTAL_FORMS": "2", + "form-INITIAL_FORMS": "0", + "form-0-field": "valid", + "form-1-field": "valid", + } + ) + formset.full_clean() + msg = ( + "The formset only has 2 " + "forms." 
+ ) + with self.assertRaisesMessage(AssertionError, msg): + self.assertFormSetError(formset, 2, "field", "error") + + +class FirstUrls: + urlpatterns = [path("first/", empty_response, name="first")] + + +class SecondUrls: + urlpatterns = [path("second/", empty_response, name="second")] + + +class SetupTestEnvironmentTests(SimpleTestCase): + def test_setup_test_environment_calling_more_than_once(self): + with self.assertRaisesMessage( + RuntimeError, "setup_test_environment() was already called" + ): + setup_test_environment() + + def test_allowed_hosts(self): + for type_ in (list, tuple): + with self.subTest(type_=type_): + allowed_hosts = type_("*") + with mock.patch("django.test.utils._TestState") as x: + del x.saved_data + with self.settings(ALLOWED_HOSTS=allowed_hosts): + setup_test_environment() + self.assertEqual(settings.ALLOWED_HOSTS, ["*", "testserver"]) + + +class OverrideSettingsTests(SimpleTestCase): + # #21518 -- If neither override_settings nor a setting_changed receiver + # clears the URL cache between tests, then one of test_first or + # test_second will fail. 
+ + @override_settings(ROOT_URLCONF=FirstUrls) + def test_urlconf_first(self): + reverse("first") + + @override_settings(ROOT_URLCONF=SecondUrls) + def test_urlconf_second(self): + reverse("second") + + def test_urlconf_cache(self): + with self.assertRaises(NoReverseMatch): + reverse("first") + with self.assertRaises(NoReverseMatch): + reverse("second") + + with override_settings(ROOT_URLCONF=FirstUrls): + self.client.get(reverse("first")) + with self.assertRaises(NoReverseMatch): + reverse("second") + + with override_settings(ROOT_URLCONF=SecondUrls): + with self.assertRaises(NoReverseMatch): + reverse("first") + self.client.get(reverse("second")) + + self.client.get(reverse("first")) + with self.assertRaises(NoReverseMatch): + reverse("second") + + with self.assertRaises(NoReverseMatch): + reverse("first") + with self.assertRaises(NoReverseMatch): + reverse("second") + + def test_override_media_root(self): + """ + Overriding the MEDIA_ROOT setting should be reflected in the + base_location attribute of django.core.files.storage.default_storage. + """ + self.assertEqual(default_storage.base_location, "") + with self.settings(MEDIA_ROOT="test_value"): + self.assertEqual(default_storage.base_location, "test_value") + + def test_override_media_url(self): + """ + Overriding the MEDIA_URL setting should be reflected in the + base_url attribute of django.core.files.storage.default_storage. + """ + self.assertEqual(default_storage.base_location, "") + with self.settings(MEDIA_URL="/test_value/"): + self.assertEqual(default_storage.base_url, "/test_value/") + + def test_override_file_upload_permissions(self): + """ + Overriding the FILE_UPLOAD_PERMISSIONS setting should be reflected in + the file_permissions_mode attribute of + django.core.files.storage.default_storage. 
+ """ + self.assertEqual(default_storage.file_permissions_mode, 0o644) + with self.settings(FILE_UPLOAD_PERMISSIONS=0o777): + self.assertEqual(default_storage.file_permissions_mode, 0o777) + + def test_override_file_upload_directory_permissions(self): + """ + Overriding the FILE_UPLOAD_DIRECTORY_PERMISSIONS setting should be + reflected in the directory_permissions_mode attribute of + django.core.files.storage.default_storage. + """ + self.assertIsNone(default_storage.directory_permissions_mode) + with self.settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777): + self.assertEqual(default_storage.directory_permissions_mode, 0o777) + + def test_override_database_routers(self): + """ + Overriding DATABASE_ROUTERS should update the base router. + """ + test_routers = [object()] + with self.settings(DATABASE_ROUTERS=test_routers): + self.assertEqual(router.routers, test_routers) + + def test_override_static_url(self): + """ + Overriding the STATIC_URL setting should be reflected in the + base_url attribute of + django.contrib.staticfiles.storage.staticfiles_storage. + """ + with self.settings(STATIC_URL="/test/"): + self.assertEqual(staticfiles_storage.base_url, "/test/") + + def test_override_static_root(self): + """ + Overriding the STATIC_ROOT setting should be reflected in the + location attribute of + django.contrib.staticfiles.storage.staticfiles_storage. + """ + with self.settings(STATIC_ROOT="/tmp/test"): + self.assertEqual(staticfiles_storage.location, os.path.abspath("/tmp/test")) + + def test_override_staticfiles_storage(self): + """ + Overriding the STORAGES setting should be reflected in + the value of django.contrib.staticfiles.storage.staticfiles_storage. + """ + new_class = "ManifestStaticFilesStorage" + new_storage = "django.contrib.staticfiles.storage." 
+ new_class + with self.settings( + STORAGES={STATICFILES_STORAGE_ALIAS: {"BACKEND": new_storage}} + ): + self.assertEqual(staticfiles_storage.__class__.__name__, new_class) + + def test_override_staticfiles_finders(self): + """ + Overriding the STATICFILES_FINDERS setting should be reflected in + the return value of django.contrib.staticfiles.finders.get_finders. + """ + current = get_finders() + self.assertGreater(len(list(current)), 1) + finders = ["django.contrib.staticfiles.finders.FileSystemFinder"] + with self.settings(STATICFILES_FINDERS=finders): + self.assertEqual(len(list(get_finders())), len(finders)) + + def test_override_staticfiles_dirs(self): + """ + Overriding the STATICFILES_DIRS setting should be reflected in + the locations attribute of the + django.contrib.staticfiles.finders.FileSystemFinder instance. + """ + finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder") + test_path = "/tmp/test" + expected_location = ("", test_path) + self.assertNotIn(expected_location, finder.locations) + with self.settings(STATICFILES_DIRS=[test_path]): + finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder") + self.assertIn(expected_location, finder.locations) + + +@skipUnlessDBFeature("supports_transactions") +class TestBadSetUpTestData(TestCase): + """ + An exception in setUpTestData() shouldn't leak a transaction which would + cascade across the rest of the test suite. + """ + + class MyException(Exception): + pass + + @classmethod + def setUpClass(cls): + try: + super().setUpClass() + except cls.MyException: + cls._in_atomic_block = connection.in_atomic_block + + @classmethod + def tearDownClass(Cls): + # override to avoid a second cls._rollback_atomics() which would fail. + # Normal setUpClass() methods won't have exception handling so this + # method wouldn't typically be run. + pass + + @classmethod + def setUpTestData(cls): + # Simulate a broken setUpTestData() method. 
+ raise cls.MyException() + + def test_failure_in_setUpTestData_should_rollback_transaction(self): + # setUpTestData() should call _rollback_atomics() so that the + # transaction doesn't leak. + self.assertFalse(self._in_atomic_block) + + +@skipUnlessDBFeature("supports_transactions") +class CaptureOnCommitCallbacksTests(TestCase): + databases = {"default", "other"} + callback_called = False + + def enqueue_callback(self, using="default"): + def hook(): + self.callback_called = True + + transaction.on_commit(hook, using=using) + + def test_no_arguments(self): + with self.captureOnCommitCallbacks() as callbacks: + self.enqueue_callback() + + self.assertEqual(len(callbacks), 1) + self.assertIs(self.callback_called, False) + callbacks[0]() + self.assertIs(self.callback_called, True) + + def test_using(self): + with self.captureOnCommitCallbacks(using="other") as callbacks: + self.enqueue_callback(using="other") + + self.assertEqual(len(callbacks), 1) + self.assertIs(self.callback_called, False) + callbacks[0]() + self.assertIs(self.callback_called, True) + + def test_different_using(self): + with self.captureOnCommitCallbacks(using="default") as callbacks: + self.enqueue_callback(using="other") + + self.assertEqual(callbacks, []) + + def test_execute(self): + with self.captureOnCommitCallbacks(execute=True) as callbacks: + self.enqueue_callback() + + self.assertEqual(len(callbacks), 1) + self.assertIs(self.callback_called, True) + + def test_pre_callback(self): + def pre_hook(): + pass + + transaction.on_commit(pre_hook, using="default") + with self.captureOnCommitCallbacks() as callbacks: + self.enqueue_callback() + + self.assertEqual(len(callbacks), 1) + self.assertNotEqual(callbacks[0], pre_hook) + + def test_with_rolled_back_savepoint(self): + with self.captureOnCommitCallbacks() as callbacks: + try: + with transaction.atomic(): + self.enqueue_callback() + raise IntegrityError + except IntegrityError: + # Inner transaction.atomic() has been rolled back. 
+ pass + + self.assertEqual(callbacks, []) + + def test_execute_recursive(self): + with self.captureOnCommitCallbacks(execute=True) as callbacks: + transaction.on_commit(self.enqueue_callback) + + self.assertEqual(len(callbacks), 2) + self.assertIs(self.callback_called, True) + + def test_execute_tree(self): + """ + A visualisation of the callback tree tested. Each node is expected to + be visited only once: + + └─branch_1 + ├─branch_2 + │ ├─leaf_1 + │ └─leaf_2 + └─leaf_3 + """ + branch_1_call_counter = 0 + branch_2_call_counter = 0 + leaf_1_call_counter = 0 + leaf_2_call_counter = 0 + leaf_3_call_counter = 0 + + def leaf_1(): + nonlocal leaf_1_call_counter + leaf_1_call_counter += 1 + + def leaf_2(): + nonlocal leaf_2_call_counter + leaf_2_call_counter += 1 + + def leaf_3(): + nonlocal leaf_3_call_counter + leaf_3_call_counter += 1 + + def branch_1(): + nonlocal branch_1_call_counter + branch_1_call_counter += 1 + transaction.on_commit(branch_2) + transaction.on_commit(leaf_3) + + def branch_2(): + nonlocal branch_2_call_counter + branch_2_call_counter += 1 + transaction.on_commit(leaf_1) + transaction.on_commit(leaf_2) + + with self.captureOnCommitCallbacks(execute=True) as callbacks: + transaction.on_commit(branch_1) + + self.assertEqual(branch_1_call_counter, 1) + self.assertEqual(branch_2_call_counter, 1) + self.assertEqual(leaf_1_call_counter, 1) + self.assertEqual(leaf_2_call_counter, 1) + self.assertEqual(leaf_3_call_counter, 1) + + self.assertEqual(callbacks, [branch_1, branch_2, leaf_3, leaf_1, leaf_2]) + + def test_execute_robust(self): + class MyException(Exception): + pass + + def hook(): + self.callback_called = True + raise MyException("robust callback") + + with self.assertLogs("django.test", "ERROR") as cm: + with self.captureOnCommitCallbacks(execute=True) as callbacks: + transaction.on_commit(hook, robust=True) + + self.assertEqual(len(callbacks), 1) + self.assertIs(self.callback_called, True) + + log_record = cm.records[0] + self.assertEqual( + 
log_record.getMessage(), + "Error calling CaptureOnCommitCallbacksTests.test_execute_robust.." + "hook in on_commit() (robust callback).", + ) + self.assertIsNotNone(log_record.exc_info) + raised_exception = log_record.exc_info[1] + self.assertIsInstance(raised_exception, MyException) + self.assertEqual(str(raised_exception), "robust callback") + + +class DisallowedDatabaseQueriesTests(SimpleTestCase): + def test_disallowed_database_connections(self): + expected_message = ( + "Database connections to 'default' are not allowed in SimpleTestCase " + "subclasses. Either subclass TestCase or TransactionTestCase to " + "ensure proper test isolation or add 'default' to " + "test_utils.tests.DisallowedDatabaseQueriesTests.databases to " + "silence this failure." + ) + with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message): + connection.connect() + with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message): + connection.temporary_connection() + + def test_disallowed_database_queries(self): + expected_message = ( + "Database queries to 'default' are not allowed in SimpleTestCase " + "subclasses. Either subclass TestCase or TransactionTestCase to " + "ensure proper test isolation or add 'default' to " + "test_utils.tests.DisallowedDatabaseQueriesTests.databases to " + "silence this failure." + ) + with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message): + Car.objects.first() + + def test_disallowed_database_chunked_cursor_queries(self): + expected_message = ( + "Database queries to 'default' are not allowed in SimpleTestCase " + "subclasses. Either subclass TestCase or TransactionTestCase to " + "ensure proper test isolation or add 'default' to " + "test_utils.tests.DisallowedDatabaseQueriesTests.databases to " + "silence this failure." 
class AllowedDatabaseQueriesTests(SimpleTestCase):
    """Queries are permitted once an alias is declared in ``databases``."""

    # Opt in to the 'default' alias; SimpleTestCase forbids database access
    # unless the alias is listed here (see DisallowedDatabaseQueriesTests
    # above for the failure mode).
    databases = {"default"}

    def test_allowed_database_queries(self):
        # A plain ORM query must not raise DatabaseOperationForbidden.
        Car.objects.first()

    def test_allowed_database_chunked_cursor_queries(self):
        # Chunked-cursor iteration goes through a separate code path and
        # must also be allowed.
        next(Car.objects.iterator(), None)
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, message): + self._validate_databases() + + def test_match(self): + self.__class__.databases = {"default", "other"} + self.assertEqual(self._validate_databases(), frozenset({"default", "other"})) + + def test_all(self): + self.__class__.databases = "__all__" + self.assertEqual(self._validate_databases(), frozenset(connections)) + + +@isolate_apps("test_utils", attr_name="class_apps") +class IsolatedAppsTests(SimpleTestCase): + def test_installed_apps(self): + self.assertEqual( + [app_config.label for app_config in self.class_apps.get_app_configs()], + ["test_utils"], + ) + + def test_class_decoration(self): + class ClassDecoration(models.Model): + pass + + self.assertEqual(ClassDecoration._meta.apps, self.class_apps) + + @isolate_apps("test_utils", kwarg_name="method_apps") + def test_method_decoration(self, method_apps): + class MethodDecoration(models.Model): + pass + + self.assertEqual(MethodDecoration._meta.apps, method_apps) + + def test_context_manager(self): + with isolate_apps("test_utils") as context_apps: + + class ContextManager(models.Model): + pass + + self.assertEqual(ContextManager._meta.apps, context_apps) + + @isolate_apps("test_utils", kwarg_name="method_apps") + def test_nested(self, method_apps): + class MethodDecoration(models.Model): + pass + + with isolate_apps("test_utils") as context_apps: + + class ContextManager(models.Model): + pass + + with isolate_apps("test_utils") as nested_context_apps: + + class NestedContextManager(models.Model): + pass + + self.assertEqual(MethodDecoration._meta.apps, method_apps) + self.assertEqual(ContextManager._meta.apps, context_apps) + self.assertEqual(NestedContextManager._meta.apps, nested_context_apps) + + +class DoNothingDecorator(TestContextDecorator): + def enable(self): + pass + + def disable(self): + pass + + +class TestContextDecoratorTests(SimpleTestCase): + @mock.patch.object(DoNothingDecorator, "disable") + def 
test_exception_in_setup(self, mock_disable): + """An exception is setUp() is reraised after disable() is called.""" + + class ExceptionInSetUp(unittest.TestCase): + def setUp(self): + raise NotImplementedError("reraised") + + decorator = DoNothingDecorator() + decorated_test_class = decorator.__call__(ExceptionInSetUp)() + self.assertFalse(mock_disable.called) + with self.assertRaisesMessage(NotImplementedError, "reraised"): + decorated_test_class.setUp() + decorated_test_class.doCleanups() + self.assertTrue(mock_disable.called) + + def test_cleanups_run_after_tearDown(self): + calls = [] + + class SaveCallsDecorator(TestContextDecorator): + def enable(self): + calls.append("enable") + + def disable(self): + calls.append("disable") + + class AddCleanupInSetUp(unittest.TestCase): + def setUp(self): + calls.append("setUp") + self.addCleanup(lambda: calls.append("cleanup")) + + decorator = SaveCallsDecorator() + decorated_test_class = decorator.__call__(AddCleanupInSetUp)() + decorated_test_class.setUp() + decorated_test_class.tearDown() + decorated_test_class.doCleanups() + self.assertEqual(calls, ["enable", "setUp", "cleanup", "disable"]) diff --git a/testbed/django__django/tests/test_utils/urls.py b/testbed/django__django/tests/test_utils/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..37d0c76a115fc9dae840541f9e8e4174218fe663 --- /dev/null +++ b/testbed/django__django/tests/test_utils/urls.py @@ -0,0 +1,10 @@ +from django.urls import path + +from . 
# URL routes for the test_utils test app.
urlpatterns = [
    # get_person() requires a ``pk`` keyword argument, so the route must
    # capture it; the converter was missing ("get_person//" had an empty
    # segment), which would raise a TypeError when the view is called.
    path("test_utils/get_person/<int:pk>/", views.get_person),
    path(
        "test_utils/no_template_used/", views.no_template_used, name="no_template_used"
    ),
]
0000000000000000000000000000000000000000..a63f3859de343db51ae378b2eca6b115b0994243 --- /dev/null +++ b/testbed/django__django/tests/timezones/forms.py @@ -0,0 +1,28 @@ +from django import forms + +from .models import Event + + +class EventForm(forms.Form): + dt = forms.DateTimeField() + + +class EventSplitForm(forms.Form): + dt = forms.SplitDateTimeField() + + +class EventLocalizedForm(forms.Form): + dt = forms.DateTimeField(localize=True) + + +class EventModelForm(forms.ModelForm): + class Meta: + model = Event + fields = "__all__" + + +class EventLocalizedModelForm(forms.ModelForm): + class Meta: + model = Event + fields = "__all__" + localized_fields = "__all__" diff --git a/testbed/django__django/tests/timezones/models.py b/testbed/django__django/tests/timezones/models.py new file mode 100644 index 0000000000000000000000000000000000000000..49b555b43d3ce9060a0aaf9d229429beb72f9ecd --- /dev/null +++ b/testbed/django__django/tests/timezones/models.py @@ -0,0 +1,31 @@ +from django.db import models + + +class Event(models.Model): + dt = models.DateTimeField() + + +class MaybeEvent(models.Model): + dt = models.DateTimeField(blank=True, null=True) + + +class Session(models.Model): + name = models.CharField(max_length=20) + + +class SessionEvent(models.Model): + dt = models.DateTimeField() + session = models.ForeignKey(Session, models.CASCADE, related_name="events") + + +class Timestamp(models.Model): + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + +class AllDayEvent(models.Model): + day = models.DateField() + + +class DailyEvent(models.Model): + time = models.TimeField() diff --git a/testbed/django__django/tests/timezones/tests.py b/testbed/django__django/tests/timezones/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..433c921cd38378dd3be3f1b06d3905e30e3c7e31 --- /dev/null +++ b/testbed/django__django/tests/timezones/tests.py @@ -0,0 +1,1397 @@ +import datetime +import re +import sys 
@contextmanager
def override_database_connection_timezone(timezone):
    """Temporarily set the connection's ``TIME_ZONE`` database setting.

    On both entry and exit the connection's cached ``timezone`` and
    ``timezone_name`` properties are invalidated so they are recomputed
    from the (new or restored) setting on next access.
    """
    try:
        orig_timezone = connection.settings_dict["TIME_ZONE"]
        connection.settings_dict["TIME_ZONE"] = timezone
        # Clear cached properties, after first accessing them to ensure they exist.
        connection.timezone
        del connection.timezone
        connection.timezone_name
        del connection.timezone_name
        yield
    finally:
        # Restore the original setting even if the body raised.
        connection.settings_dict["TIME_ZONE"] = orig_timezone
        # Clear cached properties, after first accessing them to ensure they exist.
        connection.timezone
        del connection.timezone
        connection.timezone_name
        del connection.timezone_name
dt) + + @skipUnlessDBFeature("supports_timezones") + def test_aware_datetime_in_other_timezone(self): + dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) + Event.objects.create(dt=dt) + event = Event.objects.get() + self.assertIsNone(event.dt.tzinfo) + # interpret the naive datetime in local time to get the correct value + self.assertEqual(event.dt.replace(tzinfo=EAT), dt) + + @skipIfDBFeature("supports_timezones") + def test_aware_datetime_unsupported(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + msg = "backend does not support timezone-aware datetimes when USE_TZ is False." + with self.assertRaisesMessage(ValueError, msg): + Event.objects.create(dt=dt) + + def test_auto_now_and_auto_now_add(self): + now = datetime.datetime.now() + past = now - datetime.timedelta(seconds=2) + future = now + datetime.timedelta(seconds=2) + Timestamp.objects.create() + ts = Timestamp.objects.get() + self.assertLess(past, ts.created) + self.assertLess(past, ts.updated) + self.assertGreater(future, ts.updated) + self.assertGreater(future, ts.updated) + + def test_query_filter(self): + dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30) + dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30) + Event.objects.create(dt=dt1) + Event.objects.create(dt=dt2) + self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2) + self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1) + self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1) + self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0) + + def test_query_datetime_lookups(self): + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0)) + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0)) + self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2) + self.assertEqual(Event.objects.filter(dt__month=1).count(), 2) + self.assertEqual(Event.objects.filter(dt__day=1).count(), 2) + self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2) + 
self.assertEqual(Event.objects.filter(dt__iso_week_day=6).count(), 2) + self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1) + self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2) + self.assertEqual(Event.objects.filter(dt__second=0).count(), 2) + + def test_query_aggregation(self): + # Only min and max make sense for datetimes. + Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20)) + Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30)) + Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40)) + result = Event.objects.aggregate(Min("dt"), Max("dt")) + self.assertEqual( + result, + { + "dt__min": datetime.datetime(2011, 9, 1, 3, 20, 40), + "dt__max": datetime.datetime(2011, 9, 1, 23, 20, 20), + }, + ) + + def test_query_annotation(self): + # Only min and max make sense for datetimes. + morning = Session.objects.create(name="morning") + afternoon = Session.objects.create(name="afternoon") + SessionEvent.objects.create( + dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon + ) + SessionEvent.objects.create( + dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon + ) + SessionEvent.objects.create( + dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning + ) + morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40) + afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30) + self.assertQuerySetEqual( + Session.objects.annotate(dt=Min("events__dt")).order_by("dt"), + [morning_min_dt, afternoon_min_dt], + transform=lambda d: d.dt, + ) + self.assertQuerySetEqual( + Session.objects.annotate(dt=Min("events__dt")).filter( + dt__lt=afternoon_min_dt + ), + [morning_min_dt], + transform=lambda d: d.dt, + ) + self.assertQuerySetEqual( + Session.objects.annotate(dt=Min("events__dt")).filter( + dt__gte=afternoon_min_dt + ), + [afternoon_min_dt], + transform=lambda d: d.dt, + ) + + def test_query_datetimes(self): + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0)) + 
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0)) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "year"), + [datetime.datetime(2011, 1, 1, 0, 0, 0)], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "month"), + [datetime.datetime(2011, 1, 1, 0, 0, 0)], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "day"), + [datetime.datetime(2011, 1, 1, 0, 0, 0)], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "hour"), + [ + datetime.datetime(2011, 1, 1, 1, 0, 0), + datetime.datetime(2011, 1, 1, 4, 0, 0), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "minute"), + [ + datetime.datetime(2011, 1, 1, 1, 30, 0), + datetime.datetime(2011, 1, 1, 4, 30, 0), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "second"), + [ + datetime.datetime(2011, 1, 1, 1, 30, 0), + datetime.datetime(2011, 1, 1, 4, 30, 0), + ], + ) + + def test_raw_sql(self): + # Regression test for #17755 + dt = datetime.datetime(2011, 9, 1, 13, 20, 30) + event = Event.objects.create(dt=dt) + self.assertEqual( + list( + Event.objects.raw("SELECT * FROM timezones_event WHERE dt = %s", [dt]) + ), + [event], + ) + + def test_cursor_execute_accepts_naive_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30) + with connection.cursor() as cursor: + cursor.execute("INSERT INTO timezones_event (dt) VALUES (%s)", [dt]) + event = Event.objects.get() + self.assertEqual(event.dt, dt) + + def test_cursor_execute_returns_naive_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30) + Event.objects.create(dt=dt) + with connection.cursor() as cursor: + cursor.execute("SELECT dt FROM timezones_event WHERE dt = %s", [dt]) + self.assertEqual(cursor.fetchall()[0][0], dt) + + def test_filter_date_field_with_aware_datetime(self): + # Regression test for #17742 + day = datetime.date(2011, 9, 1) + AllDayEvent.objects.create(day=day) + # This is 2011-09-02T01:30:00+03:00 in EAT + dt = datetime.datetime(2011, 
9, 1, 22, 30, 0, tzinfo=UTC) + self.assertTrue(AllDayEvent.objects.filter(day__gte=dt).exists()) + + +@override_settings(TIME_ZONE="Africa/Nairobi", USE_TZ=True) +class NewDatabaseTests(TestCase): + naive_warning = "DateTimeField Event.dt received a naive datetime" + + @skipIfDBFeature("supports_timezones") + def test_aware_time_unsupported(self): + t = datetime.time(13, 20, 30, tzinfo=EAT) + msg = "backend does not support timezone-aware times." + with self.assertRaisesMessage(ValueError, msg): + DailyEvent.objects.create(time=t) + + @requires_tz_support + def test_naive_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30) + with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): + Event.objects.create(dt=dt) + event = Event.objects.get() + # naive datetimes are interpreted in local time + self.assertEqual(event.dt, dt.replace(tzinfo=EAT)) + + @requires_tz_support + def test_datetime_from_date(self): + dt = datetime.date(2011, 9, 1) + with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): + Event.objects.create(dt=dt) + event = Event.objects.get() + self.assertEqual(event.dt, datetime.datetime(2011, 9, 1, tzinfo=EAT)) + + @requires_tz_support + def test_naive_datetime_with_microsecond(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060) + with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): + Event.objects.create(dt=dt) + event = Event.objects.get() + # naive datetimes are interpreted in local time + self.assertEqual(event.dt, dt.replace(tzinfo=EAT)) + + def test_aware_datetime_in_local_timezone(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + Event.objects.create(dt=dt) + event = Event.objects.get() + self.assertEqual(event.dt, dt) + + def test_aware_datetime_in_local_timezone_with_microsecond(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT) + Event.objects.create(dt=dt) + event = Event.objects.get() + self.assertEqual(event.dt, dt) + + def 
test_aware_datetime_in_utc(self): + dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + Event.objects.create(dt=dt) + event = Event.objects.get() + self.assertEqual(event.dt, dt) + + def test_aware_datetime_in_other_timezone(self): + dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) + Event.objects.create(dt=dt) + event = Event.objects.get() + self.assertEqual(event.dt, dt) + + def test_auto_now_and_auto_now_add(self): + now = timezone.now() + past = now - datetime.timedelta(seconds=2) + future = now + datetime.timedelta(seconds=2) + Timestamp.objects.create() + ts = Timestamp.objects.get() + self.assertLess(past, ts.created) + self.assertLess(past, ts.updated) + self.assertGreater(future, ts.updated) + self.assertGreater(future, ts.updated) + + def test_query_filter(self): + dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT) + dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30, tzinfo=EAT) + Event.objects.create(dt=dt1) + Event.objects.create(dt=dt2) + self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2) + self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1) + self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1) + self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0) + + def test_query_filter_with_timezones(self): + tz = zoneinfo.ZoneInfo("Europe/Paris") + dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=tz) + Event.objects.create(dt=dt) + next = dt + datetime.timedelta(seconds=3) + prev = dt - datetime.timedelta(seconds=3) + self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1) + self.assertEqual(Event.objects.filter(dt__exact=next).count(), 0) + self.assertEqual(Event.objects.filter(dt__in=(prev, next)).count(), 0) + self.assertEqual(Event.objects.filter(dt__in=(prev, dt, next)).count(), 1) + self.assertEqual(Event.objects.filter(dt__range=(prev, next)).count(), 1) + + def test_query_convert_timezones(self): + # Connection timezone is equal to the current timezone, datetime + # shouldn't 
be converted. + with override_database_connection_timezone("Africa/Nairobi"): + event_datetime = datetime.datetime(2016, 1, 2, 23, 10, 11, 123, tzinfo=EAT) + event = Event.objects.create(dt=event_datetime) + self.assertEqual( + Event.objects.filter(dt__date=event_datetime.date()).first(), event + ) + # Connection timezone is not equal to the current timezone, datetime + # should be converted (-4h). + with override_database_connection_timezone("Asia/Bangkok"): + event_datetime = datetime.datetime(2016, 1, 2, 3, 10, 11, tzinfo=ICT) + event = Event.objects.create(dt=event_datetime) + self.assertEqual( + Event.objects.filter(dt__date=datetime.date(2016, 1, 1)).first(), event + ) + + @requires_tz_support + def test_query_filter_with_naive_datetime(self): + dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT) + Event.objects.create(dt=dt) + dt = dt.replace(tzinfo=None) + # naive datetimes are interpreted in local time + with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): + self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1) + with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): + self.assertEqual(Event.objects.filter(dt__lte=dt).count(), 1) + with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): + self.assertEqual(Event.objects.filter(dt__gt=dt).count(), 0) + + @skipUnlessDBFeature("has_zoneinfo_database") + def test_query_datetime_lookups(self): + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) + self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2) + self.assertEqual(Event.objects.filter(dt__month=1).count(), 2) + self.assertEqual(Event.objects.filter(dt__day=1).count(), 2) + self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2) + self.assertEqual(Event.objects.filter(dt__iso_week_day=6).count(), 2) + self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1) + 
self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2) + self.assertEqual(Event.objects.filter(dt__second=0).count(), 2) + + @skipUnlessDBFeature("has_zoneinfo_database") + def test_query_datetime_lookups_in_other_timezone(self): + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) + with timezone.override(UTC): + # These two dates fall in the same day in EAT, but in different days, + # years and months in UTC. + self.assertEqual(Event.objects.filter(dt__year=2011).count(), 1) + self.assertEqual(Event.objects.filter(dt__month=1).count(), 1) + self.assertEqual(Event.objects.filter(dt__day=1).count(), 1) + self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 1) + self.assertEqual(Event.objects.filter(dt__iso_week_day=6).count(), 1) + self.assertEqual(Event.objects.filter(dt__hour=22).count(), 1) + self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2) + self.assertEqual(Event.objects.filter(dt__second=0).count(), 2) + + def test_query_aggregation(self): + # Only min and max make sense for datetimes. + Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT)) + Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)) + Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)) + result = Event.objects.aggregate(Min("dt"), Max("dt")) + self.assertEqual( + result, + { + "dt__min": datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), + "dt__max": datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), + }, + ) + + def test_query_annotation(self): + # Only min and max make sense for datetimes. 
+ morning = Session.objects.create(name="morning") + afternoon = Session.objects.create(name="afternoon") + SessionEvent.objects.create( + dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon + ) + SessionEvent.objects.create( + dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon + ) + SessionEvent.objects.create( + dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning + ) + morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT) + afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + self.assertQuerySetEqual( + Session.objects.annotate(dt=Min("events__dt")).order_by("dt"), + [morning_min_dt, afternoon_min_dt], + transform=lambda d: d.dt, + ) + self.assertQuerySetEqual( + Session.objects.annotate(dt=Min("events__dt")).filter( + dt__lt=afternoon_min_dt + ), + [morning_min_dt], + transform=lambda d: d.dt, + ) + self.assertQuerySetEqual( + Session.objects.annotate(dt=Min("events__dt")).filter( + dt__gte=afternoon_min_dt + ), + [afternoon_min_dt], + transform=lambda d: d.dt, + ) + + @skipUnlessDBFeature("has_zoneinfo_database") + def test_query_datetimes(self): + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "year"), + [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "month"), + [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "day"), + [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "hour"), + [ + datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT), + datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "minute"), + [ + 
datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT), + datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "second"), + [ + datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT), + datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT), + ], + ) + + @skipUnlessDBFeature("has_zoneinfo_database") + def test_query_datetimes_in_other_timezone(self): + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) + Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) + with timezone.override(UTC): + self.assertSequenceEqual( + Event.objects.datetimes("dt", "year"), + [ + datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC), + datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "month"), + [ + datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC), + datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "day"), + [ + datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC), + datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "hour"), + [ + datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC), + datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "minute"), + [ + datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC), + datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC), + ], + ) + self.assertSequenceEqual( + Event.objects.datetimes("dt", "second"), + [ + datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC), + datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC), + ], + ) + + def test_raw_sql(self): + # Regression test for #17755 + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + event = Event.objects.create(dt=dt) + self.assertSequenceEqual( + list( + Event.objects.raw("SELECT * FROM 
timezones_event WHERE dt = %s", [dt]) + ), + [event], + ) + + @skipUnlessDBFeature("supports_timezones") + def test_cursor_execute_accepts_aware_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + with connection.cursor() as cursor: + cursor.execute("INSERT INTO timezones_event (dt) VALUES (%s)", [dt]) + event = Event.objects.get() + self.assertEqual(event.dt, dt) + + @skipIfDBFeature("supports_timezones") + def test_cursor_execute_accepts_naive_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + utc_naive_dt = timezone.make_naive(dt, datetime.timezone.utc) + with connection.cursor() as cursor: + cursor.execute( + "INSERT INTO timezones_event (dt) VALUES (%s)", [utc_naive_dt] + ) + event = Event.objects.get() + self.assertEqual(event.dt, dt) + + @skipUnlessDBFeature("supports_timezones") + def test_cursor_execute_returns_aware_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + Event.objects.create(dt=dt) + with connection.cursor() as cursor: + cursor.execute("SELECT dt FROM timezones_event WHERE dt = %s", [dt]) + self.assertEqual(cursor.fetchall()[0][0], dt) + + @skipIfDBFeature("supports_timezones") + def test_cursor_execute_returns_naive_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + utc_naive_dt = timezone.make_naive(dt, datetime.timezone.utc) + Event.objects.create(dt=dt) + with connection.cursor() as cursor: + cursor.execute( + "SELECT dt FROM timezones_event WHERE dt = %s", [utc_naive_dt] + ) + self.assertEqual(cursor.fetchall()[0][0], utc_naive_dt) + + @skipUnlessDBFeature("supports_timezones") + def test_cursor_explicit_time_zone(self): + with override_database_connection_timezone("Europe/Paris"): + with connection.cursor() as cursor: + cursor.execute("SELECT CURRENT_TIMESTAMP") + now = cursor.fetchone()[0] + self.assertEqual(str(now.tzinfo), "Europe/Paris") + + @requires_tz_support + def test_filter_date_field_with_aware_datetime(self): + # 
Regression test for #17742 + day = datetime.date(2011, 9, 1) + AllDayEvent.objects.create(day=day) + # This is 2011-09-02T01:30:00+03:00 in EAT + dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC) + self.assertFalse(AllDayEvent.objects.filter(day__gte=dt).exists()) + + def test_null_datetime(self): + # Regression test for #17294 + e = MaybeEvent.objects.create() + self.assertIsNone(e.dt) + + def test_update_with_timedelta(self): + initial_dt = timezone.now().replace(microsecond=0) + event = Event.objects.create(dt=initial_dt) + Event.objects.update(dt=F("dt") + timedelta(hours=2)) + event.refresh_from_db() + self.assertEqual(event.dt, initial_dt + timedelta(hours=2)) + + +@override_settings(TIME_ZONE="Africa/Nairobi", USE_TZ=True) +class ForcedTimeZoneDatabaseTests(TransactionTestCase): + """ + Test the TIME_ZONE database configuration parameter. + + Since this involves reading and writing to the same database through two + connections, this is a TransactionTestCase. + """ + + available_apps = ["timezones"] + + @classmethod + def setUpClass(cls): + # @skipIfDBFeature and @skipUnlessDBFeature cannot be chained. The + # outermost takes precedence. Handle skipping manually instead. 
+ if connection.features.supports_timezones: + raise SkipTest("Database has feature(s) supports_timezones") + if not connection.features.test_db_allows_multiple_connections: + raise SkipTest( + "Database doesn't support feature(s): " + "test_db_allows_multiple_connections" + ) + + super().setUpClass() + + def test_read_datetime(self): + fake_dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=UTC) + Event.objects.create(dt=fake_dt) + + with override_database_connection_timezone("Asia/Bangkok"): + event = Event.objects.get() + dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + self.assertEqual(event.dt, dt) + + def test_write_datetime(self): + dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + with override_database_connection_timezone("Asia/Bangkok"): + Event.objects.create(dt=dt) + + event = Event.objects.get() + fake_dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=UTC) + self.assertEqual(event.dt, fake_dt) + + +@override_settings(TIME_ZONE="Africa/Nairobi") +class SerializationTests(SimpleTestCase): + # Backend-specific notes: + # - JSON supports only milliseconds, microseconds will be truncated. + # - PyYAML dumps the UTC offset correctly for timezone-aware datetimes. + # When PyYAML < 5.3 loads this representation, it subtracts the offset + # and returns a naive datetime object in UTC. PyYAML 5.3+ loads timezones + # correctly. + # Tests are adapted to take these quirks into account. 
+ + def assert_python_contains_datetime(self, objects, dt): + self.assertEqual(objects[0]["fields"]["dt"], dt) + + def assert_json_contains_datetime(self, json, dt): + self.assertIn('"fields": {"dt": "%s"}' % dt, json) + + def assert_xml_contains_datetime(self, xml, dt): + field = parseString(xml).getElementsByTagName("field")[0] + self.assertXMLEqual(field.childNodes[0].wholeText, dt) + + def assert_yaml_contains_datetime(self, yaml, dt): + # Depending on the yaml dumper, '!timestamp' might be absent + self.assertRegex(yaml, r"\n fields: {dt: !(!timestamp)? '%s'}" % re.escape(dt)) + + def test_naive_datetime(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30) + + data = serializers.serialize("python", [Event(dt=dt)]) + self.assert_python_contains_datetime(data, dt) + obj = next(serializers.deserialize("python", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("json", [Event(dt=dt)]) + self.assert_json_contains_datetime(data, "2011-09-01T13:20:30") + obj = next(serializers.deserialize("json", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("xml", [Event(dt=dt)]) + self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30") + obj = next(serializers.deserialize("xml", data)).object + self.assertEqual(obj.dt, dt) + + if not isinstance( + serializers.get_serializer("yaml"), serializers.BadSerializer + ): + data = serializers.serialize( + "yaml", [Event(dt=dt)], default_flow_style=None + ) + self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30") + obj = next(serializers.deserialize("yaml", data)).object + self.assertEqual(obj.dt, dt) + + def test_naive_datetime_with_microsecond(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060) + + data = serializers.serialize("python", [Event(dt=dt)]) + self.assert_python_contains_datetime(data, dt) + obj = next(serializers.deserialize("python", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("json", [Event(dt=dt)]) + 
self.assert_json_contains_datetime(data, "2011-09-01T13:20:30.405") + obj = next(serializers.deserialize("json", data)).object + self.assertEqual(obj.dt, dt.replace(microsecond=405000)) + + data = serializers.serialize("xml", [Event(dt=dt)]) + self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30.405060") + obj = next(serializers.deserialize("xml", data)).object + self.assertEqual(obj.dt, dt) + + if not isinstance( + serializers.get_serializer("yaml"), serializers.BadSerializer + ): + data = serializers.serialize( + "yaml", [Event(dt=dt)], default_flow_style=None + ) + self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30.405060") + obj = next(serializers.deserialize("yaml", data)).object + self.assertEqual(obj.dt, dt) + + def test_aware_datetime_with_microsecond(self): + dt = datetime.datetime(2011, 9, 1, 17, 20, 30, 405060, tzinfo=ICT) + + data = serializers.serialize("python", [Event(dt=dt)]) + self.assert_python_contains_datetime(data, dt) + obj = next(serializers.deserialize("python", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("json", [Event(dt=dt)]) + self.assert_json_contains_datetime(data, "2011-09-01T17:20:30.405+07:00") + obj = next(serializers.deserialize("json", data)).object + self.assertEqual(obj.dt, dt.replace(microsecond=405000)) + + data = serializers.serialize("xml", [Event(dt=dt)]) + self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30.405060+07:00") + obj = next(serializers.deserialize("xml", data)).object + self.assertEqual(obj.dt, dt) + + if not isinstance( + serializers.get_serializer("yaml"), serializers.BadSerializer + ): + data = serializers.serialize( + "yaml", [Event(dt=dt)], default_flow_style=None + ) + self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30.405060+07:00") + obj = next(serializers.deserialize("yaml", data)).object + if HAS_YAML and yaml.__version__ < "5.3": + self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) + else: + self.assertEqual(obj.dt, dt) + + def 
test_aware_datetime_in_utc(self): + dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + + data = serializers.serialize("python", [Event(dt=dt)]) + self.assert_python_contains_datetime(data, dt) + obj = next(serializers.deserialize("python", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("json", [Event(dt=dt)]) + self.assert_json_contains_datetime(data, "2011-09-01T10:20:30Z") + obj = next(serializers.deserialize("json", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("xml", [Event(dt=dt)]) + self.assert_xml_contains_datetime(data, "2011-09-01T10:20:30+00:00") + obj = next(serializers.deserialize("xml", data)).object + self.assertEqual(obj.dt, dt) + + if not isinstance( + serializers.get_serializer("yaml"), serializers.BadSerializer + ): + data = serializers.serialize( + "yaml", [Event(dt=dt)], default_flow_style=None + ) + self.assert_yaml_contains_datetime(data, "2011-09-01 10:20:30+00:00") + obj = next(serializers.deserialize("yaml", data)).object + self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) + + def test_aware_datetime_in_local_timezone(self): + dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) + + data = serializers.serialize("python", [Event(dt=dt)]) + self.assert_python_contains_datetime(data, dt) + obj = next(serializers.deserialize("python", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("json", [Event(dt=dt)]) + self.assert_json_contains_datetime(data, "2011-09-01T13:20:30+03:00") + obj = next(serializers.deserialize("json", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("xml", [Event(dt=dt)]) + self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30+03:00") + obj = next(serializers.deserialize("xml", data)).object + self.assertEqual(obj.dt, dt) + + if not isinstance( + serializers.get_serializer("yaml"), serializers.BadSerializer + ): + data = serializers.serialize( + "yaml", [Event(dt=dt)], 
default_flow_style=None + ) + self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30+03:00") + obj = next(serializers.deserialize("yaml", data)).object + if HAS_YAML and yaml.__version__ < "5.3": + self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) + else: + self.assertEqual(obj.dt, dt) + + def test_aware_datetime_in_other_timezone(self): + dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) + + data = serializers.serialize("python", [Event(dt=dt)]) + self.assert_python_contains_datetime(data, dt) + obj = next(serializers.deserialize("python", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("json", [Event(dt=dt)]) + self.assert_json_contains_datetime(data, "2011-09-01T17:20:30+07:00") + obj = next(serializers.deserialize("json", data)).object + self.assertEqual(obj.dt, dt) + + data = serializers.serialize("xml", [Event(dt=dt)]) + self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30+07:00") + obj = next(serializers.deserialize("xml", data)).object + self.assertEqual(obj.dt, dt) + + if not isinstance( + serializers.get_serializer("yaml"), serializers.BadSerializer + ): + data = serializers.serialize( + "yaml", [Event(dt=dt)], default_flow_style=None + ) + self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30+07:00") + obj = next(serializers.deserialize("yaml", data)).object + if HAS_YAML and yaml.__version__ < "5.3": + self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) + else: + self.assertEqual(obj.dt, dt) + + +@translation.override(None) +@override_settings(DATETIME_FORMAT="c", TIME_ZONE="Africa/Nairobi", USE_TZ=True) +class TemplateTests(SimpleTestCase): + @requires_tz_support + def test_localtime_templatetag_and_filters(self): + """ + Test the {% localtime %} templatetag and related filters. 
+ """ + datetimes = { + "utc": datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), + "eat": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), + "ict": datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT), + "naive": datetime.datetime(2011, 9, 1, 13, 20, 30), + } + templates = { + "notag": Template( + "{% load tz %}" + "{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}" + ), + "noarg": Template( + "{% load tz %}{% localtime %}{{ dt }}|{{ dt|localtime }}|" + "{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}" + ), + "on": Template( + "{% load tz %}{% localtime on %}{{ dt }}|{{ dt|localtime }}|" + "{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}" + ), + "off": Template( + "{% load tz %}{% localtime off %}{{ dt }}|{{ dt|localtime }}|" + "{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}" + ), + } + + # Transform a list of keys in 'datetimes' to the expected template + # output. This makes the definition of 'results' more readable. + def t(*result): + return "|".join(datetimes[key].isoformat() for key in result) + + # Results for USE_TZ = True + + results = { + "utc": { + "notag": t("eat", "eat", "utc", "ict"), + "noarg": t("eat", "eat", "utc", "ict"), + "on": t("eat", "eat", "utc", "ict"), + "off": t("utc", "eat", "utc", "ict"), + }, + "eat": { + "notag": t("eat", "eat", "utc", "ict"), + "noarg": t("eat", "eat", "utc", "ict"), + "on": t("eat", "eat", "utc", "ict"), + "off": t("eat", "eat", "utc", "ict"), + }, + "ict": { + "notag": t("eat", "eat", "utc", "ict"), + "noarg": t("eat", "eat", "utc", "ict"), + "on": t("eat", "eat", "utc", "ict"), + "off": t("ict", "eat", "utc", "ict"), + }, + "naive": { + "notag": t("naive", "eat", "utc", "ict"), + "noarg": t("naive", "eat", "utc", "ict"), + "on": t("naive", "eat", "utc", "ict"), + "off": t("naive", "eat", "utc", "ict"), + }, + } + + for k1, dt in datetimes.items(): + for k2, tpl in templates.items(): + ctx = Context({"dt": dt, "ICT": ICT}) + actual = tpl.render(ctx) + expected = 
results[k1][k2] + self.assertEqual( + actual, expected, "%s / %s: %r != %r" % (k1, k2, actual, expected) + ) + + # Changes for USE_TZ = False + + results["utc"]["notag"] = t("utc", "eat", "utc", "ict") + results["ict"]["notag"] = t("ict", "eat", "utc", "ict") + + with self.settings(USE_TZ=False): + for k1, dt in datetimes.items(): + for k2, tpl in templates.items(): + ctx = Context({"dt": dt, "ICT": ICT}) + actual = tpl.render(ctx) + expected = results[k1][k2] + self.assertEqual( + actual, + expected, + "%s / %s: %r != %r" % (k1, k2, actual, expected), + ) + + def test_localtime_filters_with_iana(self): + """ + Test the |localtime, |utc, and |timezone filters with iana zones. + """ + # Use an IANA timezone as local time + tpl = Template("{% load tz %}{{ dt|localtime }}|{{ dt|utc }}") + ctx = Context({"dt": datetime.datetime(2011, 9, 1, 12, 20, 30)}) + + with self.settings(TIME_ZONE="Europe/Paris"): + self.assertEqual( + tpl.render(ctx), "2011-09-01T12:20:30+02:00|2011-09-01T10:20:30+00:00" + ) + + # Use an IANA timezone as argument + tz = zoneinfo.ZoneInfo("Europe/Paris") + tpl = Template("{% load tz %}{{ dt|timezone:tz }}") + ctx = Context( + { + "dt": datetime.datetime(2011, 9, 1, 13, 20, 30), + "tz": tz, + } + ) + self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") + + def test_localtime_templatetag_invalid_argument(self): + with self.assertRaises(TemplateSyntaxError): + Template("{% load tz %}{% localtime foo %}{% endlocaltime %}").render() + + def test_localtime_filters_do_not_raise_exceptions(self): + """ + Test the |localtime, |utc, and |timezone filters on bad inputs. 
+ """ + tpl = Template( + "{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:tz }}" + ) + with self.settings(USE_TZ=True): + # bad datetime value + ctx = Context({"dt": None, "tz": ICT}) + self.assertEqual(tpl.render(ctx), "None|||") + ctx = Context({"dt": "not a date", "tz": ICT}) + self.assertEqual(tpl.render(ctx), "not a date|||") + # bad timezone value + tpl = Template("{% load tz %}{{ dt|timezone:tz }}") + ctx = Context({"dt": datetime.datetime(2011, 9, 1, 13, 20, 30), "tz": None}) + self.assertEqual(tpl.render(ctx), "") + ctx = Context( + {"dt": datetime.datetime(2011, 9, 1, 13, 20, 30), "tz": "not a tz"} + ) + self.assertEqual(tpl.render(ctx), "") + + @requires_tz_support + def test_timezone_templatetag(self): + """ + Test the {% timezone %} templatetag. + """ + tpl = Template( + "{% load tz %}" + "{{ dt }}|" + "{% timezone tz1 %}" + "{{ dt }}|" + "{% timezone tz2 %}" + "{{ dt }}" + "{% endtimezone %}" + "{% endtimezone %}" + ) + ctx = Context( + { + "dt": datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), + "tz1": ICT, + "tz2": None, + } + ) + self.assertEqual( + tpl.render(ctx), + "2011-09-01T13:20:30+03:00|2011-09-01T17:20:30+07:00|" + "2011-09-01T13:20:30+03:00", + ) + + def test_timezone_templatetag_with_iana(self): + """ + Test the {% timezone %} templatetag with IANA time zone providers. 
+ """ + tpl = Template("{% load tz %}{% timezone tz %}{{ dt }}{% endtimezone %}") + + # Use a IANA timezone as argument + tz = zoneinfo.ZoneInfo("Europe/Paris") + ctx = Context( + { + "dt": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), + "tz": tz, + } + ) + self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") + + # Use a IANA timezone name as argument + ctx = Context( + { + "dt": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), + "tz": "Europe/Paris", + } + ) + self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") + + @skipIf(sys.platform == "win32", "Windows uses non-standard time zone names") + def test_get_current_timezone_templatetag(self): + """ + Test the {% get_current_timezone %} templatetag. + """ + tpl = Template( + "{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}" + ) + + self.assertEqual(tpl.render(Context()), "Africa/Nairobi") + with timezone.override(UTC): + self.assertEqual(tpl.render(Context()), "UTC") + + tpl = Template( + "{% load tz %}{% timezone tz %}{% get_current_timezone as time_zone %}" + "{% endtimezone %}{{ time_zone }}" + ) + + self.assertEqual(tpl.render(Context({"tz": ICT})), "+0700") + with timezone.override(UTC): + self.assertEqual(tpl.render(Context({"tz": ICT})), "+0700") + + def test_get_current_timezone_templatetag_with_iana(self): + tpl = Template( + "{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}" + ) + tz = zoneinfo.ZoneInfo("Europe/Paris") + with timezone.override(tz): + self.assertEqual(tpl.render(Context()), "Europe/Paris") + + tpl = Template( + "{% load tz %}{% timezone 'Europe/Paris' %}" + "{% get_current_timezone as time_zone %}{% endtimezone %}" + "{{ time_zone }}" + ) + self.assertEqual(tpl.render(Context()), "Europe/Paris") + + def test_get_current_timezone_templatetag_invalid_argument(self): + msg = ( + "'get_current_timezone' requires 'as variable' (got " + "['get_current_timezone'])" + ) + with self.assertRaisesMessage(TemplateSyntaxError, 
msg): + Template("{% load tz %}{% get_current_timezone %}").render() + + @skipIf(sys.platform == "win32", "Windows uses non-standard time zone names") + def test_tz_template_context_processor(self): + """ + Test the django.template.context_processors.tz template context processor. + """ + tpl = Template("{{ TIME_ZONE }}") + context = Context() + self.assertEqual(tpl.render(context), "") + request_context = RequestContext( + HttpRequest(), processors=[context_processors.tz] + ) + self.assertEqual(tpl.render(request_context), "Africa/Nairobi") + + @requires_tz_support + def test_date_and_time_template_filters(self): + tpl = Template("{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}") + ctx = Context({"dt": datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)}) + self.assertEqual(tpl.render(ctx), "2011-09-01 at 23:20:20") + with timezone.override(ICT): + self.assertEqual(tpl.render(ctx), "2011-09-02 at 03:20:20") + + def test_date_and_time_template_filters_honor_localtime(self): + tpl = Template( + "{% load tz %}{% localtime off %}{{ dt|date:'Y-m-d' }} at " + "{{ dt|time:'H:i:s' }}{% endlocaltime %}" + ) + ctx = Context({"dt": datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)}) + self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20") + with timezone.override(ICT): + self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20") + + @requires_tz_support + def test_now_template_tag_uses_current_time_zone(self): + # Regression for #17343 + tpl = Template('{% now "O" %}') + self.assertEqual(tpl.render(Context({})), "+0300") + with timezone.override(ICT): + self.assertEqual(tpl.render(Context({})), "+0700") + + +@override_settings(DATETIME_FORMAT="c", TIME_ZONE="Africa/Nairobi", USE_TZ=False) +class LegacyFormsTests(TestCase): + def test_form(self): + form = EventForm({"dt": "2011-09-01 13:20:30"}) + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data["dt"], datetime.datetime(2011, 9, 1, 13, 20, 30) + ) + + def 
test_form_with_non_existent_time(self): + form = EventForm({"dt": "2011-03-27 02:30:00"}) + tz = zoneinfo.ZoneInfo("Europe/Paris") + with timezone.override(tz): + # This is a bug. + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data["dt"], + datetime.datetime(2011, 3, 27, 2, 30, 0), + ) + + def test_form_with_ambiguous_time(self): + form = EventForm({"dt": "2011-10-30 02:30:00"}) + tz = zoneinfo.ZoneInfo("Europe/Paris") + with timezone.override(tz): + # This is a bug. + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data["dt"], + datetime.datetime(2011, 10, 30, 2, 30, 0), + ) + + def test_split_form(self): + form = EventSplitForm({"dt_0": "2011-09-01", "dt_1": "13:20:30"}) + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data["dt"], datetime.datetime(2011, 9, 1, 13, 20, 30) + ) + + def test_model_form(self): + EventModelForm({"dt": "2011-09-01 13:20:30"}).save() + e = Event.objects.get() + self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 13, 20, 30)) + + +@override_settings(DATETIME_FORMAT="c", TIME_ZONE="Africa/Nairobi", USE_TZ=True) +class NewFormsTests(TestCase): + @requires_tz_support + def test_form(self): + form = EventForm({"dt": "2011-09-01 13:20:30"}) + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data["dt"], + datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), + ) + + def test_form_with_other_timezone(self): + form = EventForm({"dt": "2011-09-01 17:20:30"}) + with timezone.override(ICT): + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data["dt"], + datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), + ) + + def test_form_with_non_existent_time(self): + tz = zoneinfo.ZoneInfo("Europe/Paris") + with timezone.override(tz): + form = EventForm({"dt": "2011-03-27 02:30:00"}) + self.assertFalse(form.is_valid()) + self.assertEqual( + form.errors["dt"], + [ + "2011-03-27 02:30:00 couldn’t be interpreted in time zone " + "Europe/Paris; it may be 
ambiguous or it may not exist." + ], + ) + + def test_form_with_ambiguous_time(self): + tz = zoneinfo.ZoneInfo("Europe/Paris") + with timezone.override(tz): + form = EventForm({"dt": "2011-10-30 02:30:00"}) + self.assertFalse(form.is_valid()) + self.assertEqual( + form.errors["dt"], + [ + "2011-10-30 02:30:00 couldn’t be interpreted in time zone " + "Europe/Paris; it may be ambiguous or it may not exist." + ], + ) + + @requires_tz_support + def test_split_form(self): + form = EventSplitForm({"dt_0": "2011-09-01", "dt_1": "13:20:30"}) + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data["dt"], + datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), + ) + + @requires_tz_support + def test_localized_form(self): + form = EventLocalizedForm( + initial={"dt": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)} + ) + with timezone.override(ICT): + self.assertIn("2011-09-01 17:20:30", str(form)) + + @requires_tz_support + def test_model_form(self): + EventModelForm({"dt": "2011-09-01 13:20:30"}).save() + e = Event.objects.get() + self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) + + @requires_tz_support + def test_localized_model_form(self): + form = EventLocalizedModelForm( + instance=Event(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)) + ) + with timezone.override(ICT): + self.assertIn("2011-09-01 17:20:30", str(form)) + + +@translation.override(None) +@override_settings( + DATETIME_FORMAT="c", + TIME_ZONE="Africa/Nairobi", + USE_TZ=True, + ROOT_URLCONF="timezones.urls", +) +class AdminTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.u1 = User.objects.create_user( + password="secret", + last_login=datetime.datetime(2007, 5, 30, 13, 20, 10, tzinfo=UTC), + is_superuser=True, + username="super", + first_name="Super", + last_name="User", + email="super@example.com", + is_staff=True, + is_active=True, + date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10, tzinfo=UTC), + ) + + def setUp(self): + 
self.client.force_login(self.u1) + + @requires_tz_support + def test_changelist(self): + e = Event.objects.create( + dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + ) + response = self.client.get(reverse("admin_tz:timezones_event_changelist")) + self.assertContains(response, e.dt.astimezone(EAT).isoformat()) + + def test_changelist_in_other_timezone(self): + e = Event.objects.create( + dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + ) + with timezone.override(ICT): + response = self.client.get(reverse("admin_tz:timezones_event_changelist")) + self.assertContains(response, e.dt.astimezone(ICT).isoformat()) + + @requires_tz_support + def test_change_editable(self): + e = Event.objects.create( + dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + ) + response = self.client.get( + reverse("admin_tz:timezones_event_change", args=(e.pk,)) + ) + self.assertContains(response, e.dt.astimezone(EAT).date().isoformat()) + self.assertContains(response, e.dt.astimezone(EAT).time().isoformat()) + + def test_change_editable_in_other_timezone(self): + e = Event.objects.create( + dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) + ) + with timezone.override(ICT): + response = self.client.get( + reverse("admin_tz:timezones_event_change", args=(e.pk,)) + ) + self.assertContains(response, e.dt.astimezone(ICT).date().isoformat()) + self.assertContains(response, e.dt.astimezone(ICT).time().isoformat()) + + @requires_tz_support + def test_change_readonly(self): + t = Timestamp.objects.create() + response = self.client.get( + reverse("admin_tz:timezones_timestamp_change", args=(t.pk,)) + ) + self.assertContains(response, t.created.astimezone(EAT).isoformat()) + + def test_change_readonly_in_other_timezone(self): + t = Timestamp.objects.create() + with timezone.override(ICT): + response = self.client.get( + reverse("admin_tz:timezones_timestamp_change", args=(t.pk,)) + ) + self.assertContains(response, t.created.astimezone(ICT).isoformat()) diff --git 
a/testbed/django__django/tests/timezones/urls.py b/testbed/django__django/tests/timezones/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..d9e99f48c582bdcdfa7d536faf320a71fd7bb3f8 --- /dev/null +++ b/testbed/django__django/tests/timezones/urls.py @@ -0,0 +1,7 @@ +from django.urls import path + +from . import admin as tz_admin # NOQA: register tz_admin + +urlpatterns = [ + path("admin/", tz_admin.site.urls), +] diff --git a/testbed/django__django/tests/transaction_hooks/__init__.py b/testbed/django__django/tests/transaction_hooks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/transaction_hooks/models.py b/testbed/django__django/tests/transaction_hooks/models.py new file mode 100644 index 0000000000000000000000000000000000000000..000fd71ca2089de445d65ab69e539d3259d678c2 --- /dev/null +++ b/testbed/django__django/tests/transaction_hooks/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class Thing(models.Model): + num = models.IntegerField() diff --git a/testbed/django__django/tests/transaction_hooks/tests.py b/testbed/django__django/tests/transaction_hooks/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..938e92575f0c796da36918a2b25a17d528db8a36 --- /dev/null +++ b/testbed/django__django/tests/transaction_hooks/tests.py @@ -0,0 +1,283 @@ +from django.db import connection, transaction +from django.test import TransactionTestCase, skipUnlessDBFeature + +from .models import Thing + + +class ForcedError(Exception): + pass + + +@skipUnlessDBFeature("supports_transactions") +class TestConnectionOnCommit(TransactionTestCase): + """ + Tests for transaction.on_commit(). + + Creation/checking of database objects in parallel with callback tracking is + to verify that the behavior of the two match in all tested cases. 
+ """ + + available_apps = ["transaction_hooks"] + + def setUp(self): + self.notified = [] + + def notify(self, id_): + if id_ == "error": + raise ForcedError() + self.notified.append(id_) + + def do(self, num): + """Create a Thing instance and notify about it.""" + Thing.objects.create(num=num) + transaction.on_commit(lambda: self.notify(num)) + + def assertDone(self, nums): + self.assertNotified(nums) + self.assertEqual(sorted(t.num for t in Thing.objects.all()), sorted(nums)) + + def assertNotified(self, nums): + self.assertEqual(self.notified, nums) + + def test_executes_immediately_if_no_transaction(self): + self.do(1) + self.assertDone([1]) + + def test_robust_if_no_transaction(self): + def robust_callback(): + raise ForcedError("robust callback") + + with self.assertLogs("django.db.backends.base", "ERROR") as cm: + transaction.on_commit(robust_callback, robust=True) + self.do(1) + + self.assertDone([1]) + log_record = cm.records[0] + self.assertEqual( + log_record.getMessage(), + "Error calling TestConnectionOnCommit.test_robust_if_no_transaction." + ".robust_callback in on_commit() (robust callback).", + ) + self.assertIsNotNone(log_record.exc_info) + raised_exception = log_record.exc_info[1] + self.assertIsInstance(raised_exception, ForcedError) + self.assertEqual(str(raised_exception), "robust callback") + + def test_robust_transaction(self): + def robust_callback(): + raise ForcedError("robust callback") + + with self.assertLogs("django.db.backends", "ERROR") as cm: + with transaction.atomic(): + transaction.on_commit(robust_callback, robust=True) + self.do(1) + + self.assertDone([1]) + log_record = cm.records[0] + self.assertEqual( + log_record.getMessage(), + "Error calling TestConnectionOnCommit.test_robust_transaction.." 
+ "robust_callback in on_commit() during transaction (robust callback).", + ) + self.assertIsNotNone(log_record.exc_info) + raised_exception = log_record.exc_info[1] + self.assertIsInstance(raised_exception, ForcedError) + self.assertEqual(str(raised_exception), "robust callback") + + def test_delays_execution_until_after_transaction_commit(self): + with transaction.atomic(): + self.do(1) + self.assertNotified([]) + self.assertDone([1]) + + def test_does_not_execute_if_transaction_rolled_back(self): + try: + with transaction.atomic(): + self.do(1) + raise ForcedError() + except ForcedError: + pass + + self.assertDone([]) + + def test_executes_only_after_final_transaction_committed(self): + with transaction.atomic(): + with transaction.atomic(): + self.do(1) + self.assertNotified([]) + self.assertNotified([]) + self.assertDone([1]) + + def test_discards_hooks_from_rolled_back_savepoint(self): + with transaction.atomic(): + # one successful savepoint + with transaction.atomic(): + self.do(1) + # one failed savepoint + try: + with transaction.atomic(): + self.do(2) + raise ForcedError() + except ForcedError: + pass + # another successful savepoint + with transaction.atomic(): + self.do(3) + + # only hooks registered during successful savepoints execute + self.assertDone([1, 3]) + + def test_no_hooks_run_from_failed_transaction(self): + """If outer transaction fails, no hooks from within it run.""" + try: + with transaction.atomic(): + with transaction.atomic(): + self.do(1) + raise ForcedError() + except ForcedError: + pass + + self.assertDone([]) + + def test_inner_savepoint_rolled_back_with_outer(self): + with transaction.atomic(): + try: + with transaction.atomic(): + with transaction.atomic(): + self.do(1) + raise ForcedError() + except ForcedError: + pass + self.do(2) + + self.assertDone([2]) + + def test_no_savepoints_atomic_merged_with_outer(self): + with transaction.atomic(): + with transaction.atomic(): + self.do(1) + try: + with 
transaction.atomic(savepoint=False): + raise ForcedError() + except ForcedError: + pass + + self.assertDone([]) + + def test_inner_savepoint_does_not_affect_outer(self): + with transaction.atomic(): + with transaction.atomic(): + self.do(1) + try: + with transaction.atomic(): + raise ForcedError() + except ForcedError: + pass + + self.assertDone([1]) + + def test_runs_hooks_in_order_registered(self): + with transaction.atomic(): + self.do(1) + with transaction.atomic(): + self.do(2) + self.do(3) + + self.assertDone([1, 2, 3]) + + def test_hooks_cleared_after_successful_commit(self): + with transaction.atomic(): + self.do(1) + with transaction.atomic(): + self.do(2) + + self.assertDone([1, 2]) # not [1, 1, 2] + + def test_hooks_cleared_after_rollback(self): + try: + with transaction.atomic(): + self.do(1) + raise ForcedError() + except ForcedError: + pass + + with transaction.atomic(): + self.do(2) + + self.assertDone([2]) + + @skipUnlessDBFeature("test_db_allows_multiple_connections") + def test_hooks_cleared_on_reconnect(self): + with transaction.atomic(): + self.do(1) + connection.close() + + connection.connect() + + with transaction.atomic(): + self.do(2) + + self.assertDone([2]) + + def test_error_in_hook_doesnt_prevent_clearing_hooks(self): + try: + with transaction.atomic(): + transaction.on_commit(lambda: self.notify("error")) + except ForcedError: + pass + + with transaction.atomic(): + self.do(1) + + self.assertDone([1]) + + def test_db_query_in_hook(self): + with transaction.atomic(): + Thing.objects.create(num=1) + transaction.on_commit( + lambda: [self.notify(t.num) for t in Thing.objects.all()] + ) + + self.assertDone([1]) + + def test_transaction_in_hook(self): + def on_commit(): + with transaction.atomic(): + t = Thing.objects.create(num=1) + self.notify(t.num) + + with transaction.atomic(): + transaction.on_commit(on_commit) + + self.assertDone([1]) + + def test_hook_in_hook(self): + def on_commit(i, add_hook): + with transaction.atomic(): + if 
add_hook: + transaction.on_commit(lambda: on_commit(i + 10, False)) + t = Thing.objects.create(num=i) + self.notify(t.num) + + with transaction.atomic(): + transaction.on_commit(lambda: on_commit(1, True)) + transaction.on_commit(lambda: on_commit(2, True)) + + self.assertDone([1, 11, 2, 12]) + + def test_raises_exception_non_autocommit_mode(self): + def should_never_be_called(): + raise AssertionError("this function should never be called") + + try: + connection.set_autocommit(False) + msg = "on_commit() cannot be used in manual transaction management" + with self.assertRaisesMessage(transaction.TransactionManagementError, msg): + transaction.on_commit(should_never_be_called) + finally: + connection.set_autocommit(True) + + def test_raises_exception_non_callable(self): + msg = "on_commit()'s callback must be a callable." + with self.assertRaisesMessage(TypeError, msg): + transaction.on_commit(None) diff --git a/testbed/django__django/tests/transactions/__init__.py b/testbed/django__django/tests/transactions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/transactions/models.py b/testbed/django__django/tests/transactions/models.py new file mode 100644 index 0000000000000000000000000000000000000000..9506bace3516b0f117f2124ab0a8d18aca58aa17 --- /dev/null +++ b/testbed/django__django/tests/transactions/models.py @@ -0,0 +1,21 @@ +""" +Transactions + +Django handles transactions in three different ways. The default is to commit +each transaction upon a write, but you can decorate a function to get +commit-on-success behavior. Alternatively, you can manage the transaction +manually. 
+""" +from django.db import models + + +class Reporter(models.Model): + first_name = models.CharField(max_length=30) + last_name = models.CharField(max_length=30) + email = models.EmailField() + + class Meta: + ordering = ("first_name", "last_name") + + def __str__(self): + return ("%s %s" % (self.first_name, self.last_name)).strip() diff --git a/testbed/django__django/tests/transactions/tests.py b/testbed/django__django/tests/transactions/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..2419eb47f2e75d2d2d362683435622c9c09103d7 --- /dev/null +++ b/testbed/django__django/tests/transactions/tests.py @@ -0,0 +1,583 @@ +import sys +import threading +import time +from unittest import skipIf, skipUnless + +from django.db import ( + DatabaseError, + Error, + IntegrityError, + OperationalError, + connection, + transaction, +) +from django.test import ( + TestCase, + TransactionTestCase, + skipIfDBFeature, + skipUnlessDBFeature, +) + +from .models import Reporter + + +@skipUnlessDBFeature("uses_savepoints") +class AtomicTests(TransactionTestCase): + """ + Tests for the atomic decorator and context manager. + + The tests make assertions on internal attributes because there isn't a + robust way to ask the database for its current transaction state. + + Since the decorator syntax is converted into a context manager (see the + implementation), there are only a few basic tests with the decorator + syntax and the bulk of the tests use the context manager syntax. 
+ """ + + available_apps = ["transactions"] + + def test_decorator_syntax_commit(self): + @transaction.atomic + def make_reporter(): + return Reporter.objects.create(first_name="Tintin") + + reporter = make_reporter() + self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + + def test_decorator_syntax_rollback(self): + @transaction.atomic + def make_reporter(): + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + + with self.assertRaisesMessage(Exception, "Oops"): + make_reporter() + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_alternate_decorator_syntax_commit(self): + @transaction.atomic() + def make_reporter(): + return Reporter.objects.create(first_name="Tintin") + + reporter = make_reporter() + self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + + def test_alternate_decorator_syntax_rollback(self): + @transaction.atomic() + def make_reporter(): + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + + with self.assertRaisesMessage(Exception, "Oops"): + make_reporter() + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_commit(self): + with transaction.atomic(): + reporter = Reporter.objects.create(first_name="Tintin") + self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + + def test_rollback(self): + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(): + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_nested_commit_commit(self): + with transaction.atomic(): + reporter1 = Reporter.objects.create(first_name="Tintin") + with transaction.atomic(): + reporter2 = Reporter.objects.create( + first_name="Archibald", last_name="Haddock" + ) + self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1]) + + def test_nested_commit_rollback(self): + with 
transaction.atomic(): + reporter = Reporter.objects.create(first_name="Tintin") + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(): + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + + def test_nested_rollback_commit(self): + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(): + Reporter.objects.create(last_name="Tintin") + with transaction.atomic(): + Reporter.objects.create(last_name="Haddock") + raise Exception("Oops, that's his first name") + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_nested_rollback_rollback(self): + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(): + Reporter.objects.create(last_name="Tintin") + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(): + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + raise Exception("Oops, that's his first name") + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_merged_commit_commit(self): + with transaction.atomic(): + reporter1 = Reporter.objects.create(first_name="Tintin") + with transaction.atomic(savepoint=False): + reporter2 = Reporter.objects.create( + first_name="Archibald", last_name="Haddock" + ) + self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1]) + + def test_merged_commit_rollback(self): + with transaction.atomic(): + Reporter.objects.create(first_name="Tintin") + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(savepoint=False): + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + # Writes in the outer block are rolled back too. 
+ self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_merged_rollback_commit(self): + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(): + Reporter.objects.create(last_name="Tintin") + with transaction.atomic(savepoint=False): + Reporter.objects.create(last_name="Haddock") + raise Exception("Oops, that's his first name") + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_merged_rollback_rollback(self): + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(): + Reporter.objects.create(last_name="Tintin") + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(savepoint=False): + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + raise Exception("Oops, that's his first name") + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_reuse_commit_commit(self): + atomic = transaction.atomic() + with atomic: + reporter1 = Reporter.objects.create(first_name="Tintin") + with atomic: + reporter2 = Reporter.objects.create( + first_name="Archibald", last_name="Haddock" + ) + self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1]) + + def test_reuse_commit_rollback(self): + atomic = transaction.atomic() + with atomic: + reporter = Reporter.objects.create(first_name="Tintin") + with self.assertRaisesMessage(Exception, "Oops"): + with atomic: + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + + def test_reuse_rollback_commit(self): + atomic = transaction.atomic() + with self.assertRaisesMessage(Exception, "Oops"): + with atomic: + Reporter.objects.create(last_name="Tintin") + with atomic: + Reporter.objects.create(last_name="Haddock") + raise Exception("Oops, that's his first name") + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_reuse_rollback_rollback(self): + atomic = 
transaction.atomic() + with self.assertRaisesMessage(Exception, "Oops"): + with atomic: + Reporter.objects.create(last_name="Tintin") + with self.assertRaisesMessage(Exception, "Oops"): + with atomic: + Reporter.objects.create(first_name="Haddock") + raise Exception("Oops, that's his last name") + raise Exception("Oops, that's his first name") + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_force_rollback(self): + with transaction.atomic(): + Reporter.objects.create(first_name="Tintin") + # atomic block shouldn't rollback, but force it. + self.assertFalse(transaction.get_rollback()) + transaction.set_rollback(True) + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_prevent_rollback(self): + with transaction.atomic(): + reporter = Reporter.objects.create(first_name="Tintin") + sid = transaction.savepoint() + # trigger a database error inside an inner atomic without savepoint + with self.assertRaises(DatabaseError): + with transaction.atomic(savepoint=False): + with connection.cursor() as cursor: + cursor.execute("SELECT no_such_col FROM transactions_reporter") + # prevent atomic from rolling back since we're recovering manually + self.assertTrue(transaction.get_rollback()) + transaction.set_rollback(False) + transaction.savepoint_rollback(sid) + self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + + @skipUnlessDBFeature("can_release_savepoints") + def test_failure_on_exit_transaction(self): + with transaction.atomic(): + with self.assertRaises(DatabaseError): + with transaction.atomic(): + Reporter.objects.create(last_name="Tintin") + self.assertEqual(len(Reporter.objects.all()), 1) + # Incorrect savepoint id to provoke a database error. 
+ connection.savepoint_ids.append("12") + with self.assertRaises(transaction.TransactionManagementError): + len(Reporter.objects.all()) + self.assertIs(connection.needs_rollback, True) + if connection.savepoint_ids: + connection.savepoint_ids.pop() + self.assertSequenceEqual(Reporter.objects.all(), []) + + +class AtomicInsideTransactionTests(AtomicTests): + """All basic tests for atomic should also pass within an existing transaction.""" + + def setUp(self): + self.atomic = transaction.atomic() + self.atomic.__enter__() + + def tearDown(self): + self.atomic.__exit__(*sys.exc_info()) + + +class AtomicWithoutAutocommitTests(AtomicTests): + """All basic tests for atomic should also pass when autocommit is turned off.""" + + def setUp(self): + transaction.set_autocommit(False) + + def tearDown(self): + # The tests access the database after exercising 'atomic', initiating + # a transaction ; a rollback is required before restoring autocommit. + transaction.rollback() + transaction.set_autocommit(True) + + +@skipUnlessDBFeature("uses_savepoints") +class AtomicMergeTests(TransactionTestCase): + """Test merging transactions with savepoint=False.""" + + available_apps = ["transactions"] + + def test_merged_outer_rollback(self): + with transaction.atomic(): + Reporter.objects.create(first_name="Tintin") + with transaction.atomic(savepoint=False): + Reporter.objects.create(first_name="Archibald", last_name="Haddock") + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(savepoint=False): + Reporter.objects.create(first_name="Calculus") + raise Exception("Oops, that's his last name") + # The third insert couldn't be roll back. Temporarily mark the + # connection as not needing rollback to check it. + self.assertTrue(transaction.get_rollback()) + transaction.set_rollback(False) + self.assertEqual(Reporter.objects.count(), 3) + transaction.set_rollback(True) + # The second insert couldn't be roll back. 
Temporarily mark the + # connection as not needing rollback to check it. + self.assertTrue(transaction.get_rollback()) + transaction.set_rollback(False) + self.assertEqual(Reporter.objects.count(), 3) + transaction.set_rollback(True) + # The first block has a savepoint and must roll back. + self.assertSequenceEqual(Reporter.objects.all(), []) + + def test_merged_inner_savepoint_rollback(self): + with transaction.atomic(): + reporter = Reporter.objects.create(first_name="Tintin") + with transaction.atomic(): + Reporter.objects.create(first_name="Archibald", last_name="Haddock") + with self.assertRaisesMessage(Exception, "Oops"): + with transaction.atomic(savepoint=False): + Reporter.objects.create(first_name="Calculus") + raise Exception("Oops, that's his last name") + # The third insert couldn't be roll back. Temporarily mark the + # connection as not needing rollback to check it. + self.assertTrue(transaction.get_rollback()) + transaction.set_rollback(False) + self.assertEqual(Reporter.objects.count(), 3) + transaction.set_rollback(True) + # The second block has a savepoint and must roll back. + self.assertEqual(Reporter.objects.count(), 1) + self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + + +@skipUnlessDBFeature("uses_savepoints") +class AtomicErrorsTests(TransactionTestCase): + available_apps = ["transactions"] + forbidden_atomic_msg = "This is forbidden when an 'atomic' block is active." + + def test_atomic_prevents_setting_autocommit(self): + autocommit = transaction.get_autocommit() + with transaction.atomic(): + with self.assertRaisesMessage( + transaction.TransactionManagementError, self.forbidden_atomic_msg + ): + transaction.set_autocommit(not autocommit) + # Make sure autocommit wasn't changed. 
+ self.assertEqual(connection.autocommit, autocommit) + + def test_atomic_prevents_calling_transaction_methods(self): + with transaction.atomic(): + with self.assertRaisesMessage( + transaction.TransactionManagementError, self.forbidden_atomic_msg + ): + transaction.commit() + with self.assertRaisesMessage( + transaction.TransactionManagementError, self.forbidden_atomic_msg + ): + transaction.rollback() + + def test_atomic_prevents_queries_in_broken_transaction(self): + r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock") + with transaction.atomic(): + r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id) + with self.assertRaises(IntegrityError): + r2.save(force_insert=True) + # The transaction is marked as needing rollback. + msg = ( + "An error occurred in the current transaction. You can't " + "execute queries until the end of the 'atomic' block." + ) + with self.assertRaisesMessage( + transaction.TransactionManagementError, msg + ) as cm: + r2.save(force_update=True) + self.assertIsInstance(cm.exception.__cause__, IntegrityError) + self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Haddock") + + @skipIfDBFeature("atomic_transactions") + def test_atomic_allows_queries_after_fixing_transaction(self): + r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock") + with transaction.atomic(): + r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id) + with self.assertRaises(IntegrityError): + r2.save(force_insert=True) + # Mark the transaction as no longer needing rollback. 
+ transaction.set_rollback(False) + r2.save(force_update=True) + self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Calculus") + + @skipUnlessDBFeature("test_db_allows_multiple_connections") + def test_atomic_prevents_queries_in_broken_transaction_after_client_close(self): + with transaction.atomic(): + Reporter.objects.create(first_name="Archibald", last_name="Haddock") + connection.close() + # The connection is closed and the transaction is marked as + # needing rollback. This will raise an InterfaceError on databases + # that refuse to create cursors on closed connections (PostgreSQL) + # and a TransactionManagementError on other databases. + with self.assertRaises(Error): + Reporter.objects.create(first_name="Cuthbert", last_name="Calculus") + # The connection is usable again . + self.assertEqual(Reporter.objects.count(), 0) + + +@skipUnlessDBFeature("uses_savepoints") +@skipUnless(connection.vendor == "mysql", "MySQL-specific behaviors") +class AtomicMySQLTests(TransactionTestCase): + available_apps = ["transactions"] + + @skipIf(threading is None, "Test requires threading") + def test_implicit_savepoint_rollback(self): + """MySQL implicitly rolls back savepoints when it deadlocks (#22291).""" + Reporter.objects.create(id=1) + Reporter.objects.create(id=2) + + main_thread_ready = threading.Event() + + def other_thread(): + try: + with transaction.atomic(): + Reporter.objects.select_for_update().get(id=1) + main_thread_ready.wait() + # 1) This line locks... (see below for 2) + Reporter.objects.exclude(id=1).update(id=2) + finally: + # This is the thread-local connection, not the main connection. + connection.close() + + other_thread = threading.Thread(target=other_thread) + other_thread.start() + + with self.assertRaisesMessage(OperationalError, "Deadlock found"): + # Double atomic to enter a transaction and create a savepoint. 
+ with transaction.atomic(): + with transaction.atomic(): + Reporter.objects.select_for_update().get(id=2) + main_thread_ready.set() + # The two threads can't be synchronized with an event here + # because the other thread locks. Sleep for a little while. + time.sleep(1) + # 2) ... and this line deadlocks. (see above for 1) + Reporter.objects.exclude(id=2).update(id=1) + + other_thread.join() + + +class AtomicMiscTests(TransactionTestCase): + available_apps = ["transactions"] + + def test_wrap_callable_instance(self): + """#20028 -- Atomic must support wrapping callable instances.""" + + class Callable: + def __call__(self): + pass + + # Must not raise an exception + transaction.atomic(Callable()) + + @skipUnlessDBFeature("can_release_savepoints") + def test_atomic_does_not_leak_savepoints_on_failure(self): + """#23074 -- Savepoints must be released after rollback.""" + + # Expect an error when rolling back a savepoint that doesn't exist. + # Done outside of the transaction block to ensure proper recovery. + with self.assertRaises(Error): + # Start a plain transaction. + with transaction.atomic(): + # Swallow the intentional error raised in the sub-transaction. + with self.assertRaisesMessage(Exception, "Oops"): + # Start a sub-transaction with a savepoint. + with transaction.atomic(): + sid = connection.savepoint_ids[-1] + raise Exception("Oops") + + # This is expected to fail because the savepoint no longer exists. + connection.savepoint_rollback(sid) + + def test_mark_for_rollback_on_error_in_transaction(self): + with transaction.atomic(savepoint=False): + # Swallow the intentional error raised. + with self.assertRaisesMessage(Exception, "Oops"): + # Wrap in `mark_for_rollback_on_error` to check if the + # transaction is marked broken. + with transaction.mark_for_rollback_on_error(): + # Ensure that we are still in a good state. 
+ self.assertFalse(transaction.get_rollback()) + + raise Exception("Oops") + + # mark_for_rollback_on_error marked the transaction as broken … + self.assertTrue(transaction.get_rollback()) + + # … and further queries fail. + msg = "You can't execute queries until the end of the 'atomic' block." + with self.assertRaisesMessage(transaction.TransactionManagementError, msg): + Reporter.objects.create() + + # Transaction errors are reset at the end of an transaction, so this + # should just work. + Reporter.objects.create() + + def test_mark_for_rollback_on_error_in_autocommit(self): + self.assertTrue(transaction.get_autocommit()) + + # Swallow the intentional error raised. + with self.assertRaisesMessage(Exception, "Oops"): + # Wrap in `mark_for_rollback_on_error` to check if the transaction + # is marked broken. + with transaction.mark_for_rollback_on_error(): + # Ensure that we are still in a good state. + self.assertFalse(transaction.get_connection().needs_rollback) + + raise Exception("Oops") + + # Ensure that `mark_for_rollback_on_error` did not mark the transaction + # as broken, since we are in autocommit mode … + self.assertFalse(transaction.get_connection().needs_rollback) + + # … and further queries work nicely. + Reporter.objects.create() + + +class NonAutocommitTests(TransactionTestCase): + available_apps = [] + + def setUp(self): + transaction.set_autocommit(False) + + def tearDown(self): + transaction.rollback() + transaction.set_autocommit(True) + + def test_orm_query_after_error_and_rollback(self): + """ + ORM queries are allowed after an error and a rollback in non-autocommit + mode (#27504). 
+ """ + r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock") + r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id) + with self.assertRaises(IntegrityError): + r2.save(force_insert=True) + transaction.rollback() + Reporter.objects.last() + + def test_orm_query_without_autocommit(self): + """#24921 -- ORM queries must be possible after set_autocommit(False).""" + Reporter.objects.create(first_name="Tintin") + + +class DurableTestsBase: + available_apps = ["transactions"] + + def test_commit(self): + with transaction.atomic(durable=True): + reporter = Reporter.objects.create(first_name="Tintin") + self.assertEqual(Reporter.objects.get(), reporter) + + def test_nested_outer_durable(self): + with transaction.atomic(durable=True): + reporter1 = Reporter.objects.create(first_name="Tintin") + with transaction.atomic(): + reporter2 = Reporter.objects.create( + first_name="Archibald", + last_name="Haddock", + ) + self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1]) + + def test_nested_both_durable(self): + msg = "A durable atomic block cannot be nested within another atomic block." + with transaction.atomic(durable=True): + with self.assertRaisesMessage(RuntimeError, msg): + with transaction.atomic(durable=True): + pass + + def test_nested_inner_durable(self): + msg = "A durable atomic block cannot be nested within another atomic block." 
+ with transaction.atomic(): + with self.assertRaisesMessage(RuntimeError, msg): + with transaction.atomic(durable=True): + pass + + def test_sequence_of_durables(self): + with transaction.atomic(durable=True): + reporter = Reporter.objects.create(first_name="Tintin 1") + self.assertEqual(Reporter.objects.get(first_name="Tintin 1"), reporter) + with transaction.atomic(durable=True): + reporter = Reporter.objects.create(first_name="Tintin 2") + self.assertEqual(Reporter.objects.get(first_name="Tintin 2"), reporter) + + +class DurableTransactionTests(DurableTestsBase, TransactionTestCase): + pass + + +class DurableTests(DurableTestsBase, TestCase): + pass diff --git a/testbed/django__django/tests/unmanaged_models/__init__.py b/testbed/django__django/tests/unmanaged_models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/unmanaged_models/models.py b/testbed/django__django/tests/unmanaged_models/models.py new file mode 100644 index 0000000000000000000000000000000000000000..0eefcafda2a6f84fc6643d1ac4fbf775698e66cf --- /dev/null +++ b/testbed/django__django/tests/unmanaged_models/models.py @@ -0,0 +1,120 @@ +""" +Models can have a ``managed`` attribute, which specifies whether the SQL code +is generated for the table on various manage.py operations. +""" + +from django.db import models + +# All of these models are created in the database by Django. + + +class A01(models.Model): + f_a = models.CharField(max_length=10, db_index=True) + f_b = models.IntegerField() + + class Meta: + db_table = "a01" + + +class B01(models.Model): + fk_a = models.ForeignKey(A01, models.CASCADE) + f_a = models.CharField(max_length=10, db_index=True) + f_b = models.IntegerField() + + class Meta: + db_table = "b01" + # 'managed' is True by default. This tests we can set it explicitly. 
+ managed = True + + +class C01(models.Model): + mm_a = models.ManyToManyField(A01, db_table="d01") + f_a = models.CharField(max_length=10, db_index=True) + f_b = models.IntegerField() + + class Meta: + db_table = "c01" + + +# All of these models use the same tables as the previous set (they are shadows +# of possibly a subset of the columns). There should be no creation errors, +# since we have told Django they aren't managed by Django. + + +class A02(models.Model): + f_a = models.CharField(max_length=10, db_index=True) + + class Meta: + db_table = "a01" + managed = False + + +class B02(models.Model): + class Meta: + db_table = "b01" + managed = False + + fk_a = models.ForeignKey(A02, models.CASCADE) + f_a = models.CharField(max_length=10, db_index=True) + f_b = models.IntegerField() + + +# To re-use the many-to-many intermediate table, we need to manually set up +# things up. +class C02(models.Model): + mm_a = models.ManyToManyField(A02, through="Intermediate") + f_a = models.CharField(max_length=10, db_index=True) + f_b = models.IntegerField() + + class Meta: + db_table = "c01" + managed = False + + +class Intermediate(models.Model): + a02 = models.ForeignKey(A02, models.CASCADE, db_column="a01_id") + c02 = models.ForeignKey(C02, models.CASCADE, db_column="c01_id") + + class Meta: + db_table = "d01" + managed = False + + +# These next models test the creation (or not) of many to many join tables +# between managed and unmanaged models. A join table between two unmanaged +# models shouldn't be automatically created (see #10647). +# + + +# Firstly, we need some models that will create the tables, purely so that the +# tables are created. This is a test setup, not a requirement for unmanaged +# models. 
+class Proxy1(models.Model): + class Meta: + db_table = "unmanaged_models_proxy1" + + +class Proxy2(models.Model): + class Meta: + db_table = "unmanaged_models_proxy2" + + +class Unmanaged1(models.Model): + class Meta: + managed = False + db_table = "unmanaged_models_proxy1" + + +# Unmanaged with an m2m to unmanaged: the intermediary table won't be created. +class Unmanaged2(models.Model): + mm = models.ManyToManyField(Unmanaged1) + + class Meta: + managed = False + db_table = "unmanaged_models_proxy2" + + +# Here's an unmanaged model with an m2m to a managed one; the intermediary +# table *will* be created (unless given a custom `through` as for C02 above). +class Managed1(models.Model): + mm = models.ManyToManyField(Unmanaged1) diff --git a/testbed/django__django/tests/unmanaged_models/tests.py b/testbed/django__django/tests/unmanaged_models/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..dc75bdc9939b49db6090dcbd2e20e69e735a8862 --- /dev/null +++ b/testbed/django__django/tests/unmanaged_models/tests.py @@ -0,0 +1,60 @@ +from django.db import connection +from django.test import TestCase + +from .models import A01, A02, B01, B02, C01, C02, Managed1, Unmanaged2 + + +class SimpleTests(TestCase): + def test_simple(self): + """ + The main test here is that the all the models can be created without + any database errors. We can also do some more simple insertion and + lookup tests while we're here to show that the second of models do + refer to the tables from the first set. + """ + # Insert some data into one set of models. + a = A01.objects.create(f_a="foo", f_b=42) + B01.objects.create(fk_a=a, f_a="fred", f_b=1729) + c = C01.objects.create(f_a="barney", f_b=1) + c.mm_a.set([a]) + + # ... and pull it out via the other set. 
+ a2 = A02.objects.all()[0] + self.assertIsInstance(a2, A02) + self.assertEqual(a2.f_a, "foo") + + b2 = B02.objects.all()[0] + self.assertIsInstance(b2, B02) + self.assertEqual(b2.f_a, "fred") + + self.assertIsInstance(b2.fk_a, A02) + self.assertEqual(b2.fk_a.f_a, "foo") + + self.assertEqual(list(C02.objects.filter(f_a=None)), []) + + resp = list(C02.objects.filter(mm_a=a.id)) + self.assertEqual(len(resp), 1) + + self.assertIsInstance(resp[0], C02) + self.assertEqual(resp[0].f_a, "barney") + + +class ManyToManyUnmanagedTests(TestCase): + def test_many_to_many_between_unmanaged(self): + """ + The intermediary table between two unmanaged models should not be created. + """ + table = Unmanaged2._meta.get_field("mm").m2m_db_table() + tables = connection.introspection.table_names() + self.assertNotIn( + table, tables, "Table '%s' should not exist, but it does." % table + ) + + def test_many_to_many_between_unmanaged_and_managed(self): + """ + An intermediary table between a managed and an unmanaged model should + be created. + """ + table = Managed1._meta.get_field("mm").m2m_db_table() + tables = connection.introspection.table_names() + self.assertIn(table, tables, "Table '%s' does not exist." % table) diff --git a/testbed/django__django/tests/update/__init__.py b/testbed/django__django/tests/update/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/update/models.py b/testbed/django__django/tests/update/models.py new file mode 100644 index 0000000000000000000000000000000000000000..d71fc887c7d3792b4e2ac568d343bc1c69ccc055 --- /dev/null +++ b/testbed/django__django/tests/update/models.py @@ -0,0 +1,53 @@ +""" +Tests for the update() queryset method that allows in-place, multi-object +updates. 
+""" + +from django.db import models + + +class DataPoint(models.Model): + name = models.CharField(max_length=20) + value = models.CharField(max_length=20) + another_value = models.CharField(max_length=20, blank=True) + is_active = models.BooleanField(default=True) + + +class RelatedPoint(models.Model): + name = models.CharField(max_length=20) + data = models.ForeignKey(DataPoint, models.CASCADE) + + +class A(models.Model): + x = models.IntegerField(default=10) + + +class B(models.Model): + a = models.ForeignKey(A, models.CASCADE) + y = models.IntegerField(default=10) + + +class C(models.Model): + y = models.IntegerField(default=10) + + +class D(C): + a = models.ForeignKey(A, models.CASCADE) + + +class Foo(models.Model): + target = models.CharField(max_length=10, unique=True) + + +class Bar(models.Model): + foo = models.ForeignKey(Foo, models.CASCADE, to_field="target") + m2m_foo = models.ManyToManyField(Foo, related_name="m2m_foo") + x = models.IntegerField(default=0) + + +class UniqueNumber(models.Model): + number = models.IntegerField(unique=True) + + +class UniqueNumberChild(UniqueNumber): + pass diff --git a/testbed/django__django/tests/update/tests.py b/testbed/django__django/tests/update/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..079e00818ad0ea0907163d3e1fc0e2d186993f53 --- /dev/null +++ b/testbed/django__django/tests/update/tests.py @@ -0,0 +1,348 @@ +import unittest + +from django.core.exceptions import FieldError +from django.db import IntegrityError, connection, transaction +from django.db.models import Case, CharField, Count, F, IntegerField, Max, When +from django.db.models.functions import Abs, Concat, Lower +from django.test import TestCase +from django.test.utils import register_lookup + +from .models import ( + A, + B, + Bar, + D, + DataPoint, + Foo, + RelatedPoint, + UniqueNumber, + UniqueNumberChild, +) + + +class SimpleTest(TestCase): + @classmethod + def setUpTestData(cls): + cls.a1 = A.objects.create() + 
cls.a2 = A.objects.create() + for x in range(20): + B.objects.create(a=cls.a1) + D.objects.create(a=cls.a1) + + def test_nonempty_update(self): + """ + Update changes the right number of rows for a nonempty queryset + """ + num_updated = self.a1.b_set.update(y=100) + self.assertEqual(num_updated, 20) + cnt = B.objects.filter(y=100).count() + self.assertEqual(cnt, 20) + + def test_empty_update(self): + """ + Update changes the right number of rows for an empty queryset + """ + num_updated = self.a2.b_set.update(y=100) + self.assertEqual(num_updated, 0) + cnt = B.objects.filter(y=100).count() + self.assertEqual(cnt, 0) + + def test_nonempty_update_with_inheritance(self): + """ + Update changes the right number of rows for an empty queryset + when the update affects only a base table + """ + num_updated = self.a1.d_set.update(y=100) + self.assertEqual(num_updated, 20) + cnt = D.objects.filter(y=100).count() + self.assertEqual(cnt, 20) + + def test_empty_update_with_inheritance(self): + """ + Update changes the right number of rows for an empty queryset + when the update affects only a base table + """ + num_updated = self.a2.d_set.update(y=100) + self.assertEqual(num_updated, 0) + cnt = D.objects.filter(y=100).count() + self.assertEqual(cnt, 0) + + def test_foreign_key_update_with_id(self): + """ + Update works using _id for foreign keys + """ + num_updated = self.a1.d_set.update(a_id=self.a2) + self.assertEqual(num_updated, 20) + self.assertEqual(self.a2.d_set.count(), 20) + + +class AdvancedTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.d0 = DataPoint.objects.create(name="d0", value="apple") + cls.d2 = DataPoint.objects.create(name="d2", value="banana") + cls.d3 = DataPoint.objects.create(name="d3", value="banana", is_active=False) + cls.r1 = RelatedPoint.objects.create(name="r1", data=cls.d3) + + def test_update(self): + """ + Objects are updated by first filtering the candidates into a queryset + and then calling the update() method. 
It executes immediately and + returns nothing. + """ + resp = DataPoint.objects.filter(value="apple").update(name="d1") + self.assertEqual(resp, 1) + resp = DataPoint.objects.filter(value="apple") + self.assertEqual(list(resp), [self.d0]) + + def test_update_multiple_objects(self): + """ + We can update multiple objects at once. + """ + resp = DataPoint.objects.filter(value="banana").update(value="pineapple") + self.assertEqual(resp, 2) + self.assertEqual(DataPoint.objects.get(name="d2").value, "pineapple") + + def test_update_fk(self): + """ + Foreign key fields can also be updated, although you can only update + the object referred to, not anything inside the related object. + """ + resp = RelatedPoint.objects.filter(name="r1").update(data=self.d0) + self.assertEqual(resp, 1) + resp = RelatedPoint.objects.filter(data__name="d0") + self.assertEqual(list(resp), [self.r1]) + + def test_update_multiple_fields(self): + """ + Multiple fields can be updated at once + """ + resp = DataPoint.objects.filter(value="apple").update( + value="fruit", another_value="peach" + ) + self.assertEqual(resp, 1) + d = DataPoint.objects.get(name="d0") + self.assertEqual(d.value, "fruit") + self.assertEqual(d.another_value, "peach") + + def test_update_all(self): + """ + In the rare case you want to update every instance of a model, update() + is also a manager method. + """ + self.assertEqual(DataPoint.objects.update(value="thing"), 3) + resp = DataPoint.objects.values("value").distinct() + self.assertEqual(list(resp), [{"value": "thing"}]) + + def test_update_slice_fail(self): + """ + We do not support update on already sliced query sets. + """ + method = DataPoint.objects.all()[:2].update + msg = "Cannot update a query once a slice has been taken." + with self.assertRaisesMessage(TypeError, msg): + method(another_value="another thing") + + def test_update_respects_to_field(self): + """ + Update of an FK field which specifies a to_field works. 
+ """ + a_foo = Foo.objects.create(target="aaa") + b_foo = Foo.objects.create(target="bbb") + bar = Bar.objects.create(foo=a_foo) + self.assertEqual(bar.foo_id, a_foo.target) + bar_qs = Bar.objects.filter(pk=bar.pk) + self.assertEqual(bar_qs[0].foo_id, a_foo.target) + bar_qs.update(foo=b_foo) + self.assertEqual(bar_qs[0].foo_id, b_foo.target) + + def test_update_m2m_field(self): + msg = ( + "Cannot update model field " + " " + "(only non-relations and foreign keys permitted)." + ) + with self.assertRaisesMessage(FieldError, msg): + Bar.objects.update(m2m_foo="whatever") + + def test_update_transformed_field(self): + A.objects.create(x=5) + A.objects.create(x=-6) + with register_lookup(IntegerField, Abs): + A.objects.update(x=F("x__abs")) + self.assertCountEqual(A.objects.values_list("x", flat=True), [5, 6]) + + def test_update_annotated_queryset(self): + """ + Update of a queryset that's been annotated. + """ + # Trivial annotated update + qs = DataPoint.objects.annotate(alias=F("value")) + self.assertEqual(qs.update(another_value="foo"), 3) + # Update where annotation is used for filtering + qs = DataPoint.objects.annotate(alias=F("value")).filter(alias="apple") + self.assertEqual(qs.update(another_value="foo"), 1) + # Update where annotation is used in update parameters + qs = DataPoint.objects.annotate(alias=F("value")) + self.assertEqual(qs.update(another_value=F("alias")), 3) + # Update where aggregation annotation is used in update parameters + qs = DataPoint.objects.annotate(max=Max("value")) + msg = ( + "Aggregate functions are not allowed in this query " + "(another_value=Max(Col(update_datapoint, update.DataPoint.value)))." + ) + with self.assertRaisesMessage(FieldError, msg): + qs.update(another_value=F("max")) + + def test_update_annotated_multi_table_queryset(self): + """ + Update of a queryset that's been annotated and involves multiple tables. 
+ """ + # Trivial annotated update + qs = DataPoint.objects.annotate(related_count=Count("relatedpoint")) + self.assertEqual(qs.update(value="Foo"), 3) + # Update where annotation is used for filtering + qs = DataPoint.objects.annotate(related_count=Count("relatedpoint")) + self.assertEqual(qs.filter(related_count=1).update(value="Foo"), 1) + # Update where aggregation annotation is used in update parameters + qs = RelatedPoint.objects.annotate(max=Max("data__value")) + msg = "Joined field references are not permitted in this query" + with self.assertRaisesMessage(FieldError, msg): + qs.update(name=F("max")) + + def test_update_with_joined_field_annotation(self): + msg = "Joined field references are not permitted in this query" + with register_lookup(CharField, Lower): + for annotation in ( + F("data__name"), + F("data__name__lower"), + Lower("data__name"), + Concat("data__name", "data__value"), + ): + with self.subTest(annotation=annotation): + with self.assertRaisesMessage(FieldError, msg): + RelatedPoint.objects.annotate( + new_name=annotation, + ).update(name=F("new_name")) + + def test_update_ordered_by_m2m_aggregation_annotation(self): + msg = ( + "Cannot update when ordering by an aggregate: " + "Count(Col(update_bar_m2m_foo, update.Bar_m2m_foo.foo))" + ) + with self.assertRaisesMessage(FieldError, msg): + Bar.objects.annotate(m2m_count=Count("m2m_foo")).order_by( + "m2m_count" + ).update(x=2) + + def test_update_ordered_by_inline_m2m_annotation(self): + foo = Foo.objects.create(target="test") + Bar.objects.create(foo=foo) + + Bar.objects.order_by(Abs("m2m_foo")).update(x=2) + self.assertEqual(Bar.objects.get().x, 2) + + def test_update_ordered_by_m2m_annotation(self): + foo = Foo.objects.create(target="test") + Bar.objects.create(foo=foo) + + Bar.objects.annotate(abs_id=Abs("m2m_foo")).order_by("abs_id").update(x=3) + self.assertEqual(Bar.objects.get().x, 3) + + def test_update_ordered_by_m2m_annotation_desc(self): + foo = Foo.objects.create(target="test") 
+ Bar.objects.create(foo=foo) + + Bar.objects.annotate(abs_id=Abs("m2m_foo")).order_by("-abs_id").update(x=4) + self.assertEqual(Bar.objects.get().x, 4) + + def test_update_negated_f(self): + DataPoint.objects.update(is_active=~F("is_active")) + self.assertCountEqual( + DataPoint.objects.values_list("name", "is_active"), + [("d0", False), ("d2", False), ("d3", True)], + ) + DataPoint.objects.update(is_active=~F("is_active")) + self.assertCountEqual( + DataPoint.objects.values_list("name", "is_active"), + [("d0", True), ("d2", True), ("d3", False)], + ) + + def test_update_negated_f_conditional_annotation(self): + DataPoint.objects.annotate( + is_d2=Case(When(name="d2", then=True), default=False) + ).update(is_active=~F("is_d2")) + self.assertCountEqual( + DataPoint.objects.values_list("name", "is_active"), + [("d0", True), ("d2", False), ("d3", True)], + ) + + def test_updating_non_conditional_field(self): + msg = "Cannot negate non-conditional expressions." + with self.assertRaisesMessage(TypeError, msg): + DataPoint.objects.update(is_active=~F("name")) + + +@unittest.skipUnless( + connection.vendor == "mysql", + "UPDATE...ORDER BY syntax is supported on MySQL/MariaDB", +) +class MySQLUpdateOrderByTest(TestCase): + """Update field with a unique constraint using an ordered queryset.""" + + @classmethod + def setUpTestData(cls): + UniqueNumber.objects.create(number=1) + UniqueNumber.objects.create(number=2) + + def test_order_by_update_on_unique_constraint(self): + tests = [ + ("-number", "id"), + (F("number").desc(), "id"), + (F("number") * -1, "id"), + ] + for ordering in tests: + with self.subTest(ordering=ordering), transaction.atomic(): + updated = UniqueNumber.objects.order_by(*ordering).update( + number=F("number") + 1, + ) + self.assertEqual(updated, 2) + + def test_order_by_update_on_unique_constraint_annotation(self): + updated = ( + UniqueNumber.objects.annotate(number_inverse=F("number").desc()) + .order_by("number_inverse") + .update(number=F("number") 
+ 1) + ) + self.assertEqual(updated, 2) + + def test_order_by_update_on_unique_constraint_annotation_desc(self): + updated = ( + UniqueNumber.objects.annotate(number_annotation=F("number")) + .order_by("-number_annotation") + .update(number=F("number") + 1) + ) + self.assertEqual(updated, 2) + + def test_order_by_update_on_parent_unique_constraint(self): + # Ordering by inherited fields is omitted because joined fields cannot + # be used in the ORDER BY clause. + UniqueNumberChild.objects.create(number=3) + UniqueNumberChild.objects.create(number=4) + with self.assertRaises(IntegrityError): + UniqueNumberChild.objects.order_by("number").update( + number=F("number") + 1, + ) + + def test_order_by_update_on_related_field(self): + # Ordering by related fields is omitted because joined fields cannot be + # used in the ORDER BY clause. + data = DataPoint.objects.create(name="d0", value="apple") + related = RelatedPoint.objects.create(name="r0", data=data) + with self.assertNumQueries(1) as ctx: + updated = RelatedPoint.objects.order_by("data__name").update(name="new") + sql = ctx.captured_queries[0]["sql"] + self.assertNotIn("ORDER BY", sql) + self.assertEqual(updated, 1) + related.refresh_from_db() + self.assertEqual(related.name, "new") diff --git a/testbed/django__django/tests/update_only_fields/__init__.py b/testbed/django__django/tests/update_only_fields/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/update_only_fields/models.py b/testbed/django__django/tests/update_only_fields/models.py new file mode 100644 index 0000000000000000000000000000000000000000..4810d5b191692e2aadcf4a2e296f8928c250c5dc --- /dev/null +++ b/testbed/django__django/tests/update_only_fields/models.py @@ -0,0 +1,43 @@ +from django.db import models + + +class Account(models.Model): + num = models.IntegerField() + + +class Person(models.Model): + GENDER_CHOICES = ( + ("M", "Male"), 
+ ("F", "Female"), + ) + name = models.CharField(max_length=20) + gender = models.CharField(max_length=1, choices=GENDER_CHOICES) + pid = models.IntegerField(null=True, default=None) + + +class Employee(Person): + employee_num = models.IntegerField(default=0) + profile = models.ForeignKey( + "Profile", models.SET_NULL, related_name="profiles", null=True + ) + accounts = models.ManyToManyField("Account", related_name="employees", blank=True) + + +class NonConcreteField(models.IntegerField): + def db_type(self, connection): + return None + + def get_attname_column(self): + attname, _ = super().get_attname_column() + return attname, None + + +class Profile(models.Model): + name = models.CharField(max_length=200) + salary = models.FloatField(default=1000.0) + non_concrete = NonConcreteField() + + +class ProxyEmployee(Employee): + class Meta: + proxy = True diff --git a/testbed/django__django/tests/update_only_fields/tests.py b/testbed/django__django/tests/update_only_fields/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..6c23ae27d8d559c1094d5658ba90b4f8de14ad69 --- /dev/null +++ b/testbed/django__django/tests/update_only_fields/tests.py @@ -0,0 +1,284 @@ +from django.db.models.signals import post_save, pre_save +from django.test import TestCase + +from .models import Account, Employee, Person, Profile, ProxyEmployee + + +class UpdateOnlyFieldsTests(TestCase): + msg = ( + "The following fields do not exist in this model, are m2m fields, or " + "are non-concrete fields: %s" + ) + + def test_update_fields_basic(self): + s = Person.objects.create(name="Sara", gender="F") + self.assertEqual(s.gender, "F") + + s.gender = "M" + s.name = "Ian" + s.save(update_fields=["name"]) + + s = Person.objects.get(pk=s.pk) + self.assertEqual(s.gender, "F") + self.assertEqual(s.name, "Ian") + + def test_update_fields_deferred(self): + s = Person.objects.create(name="Sara", gender="F", pid=22) + self.assertEqual(s.gender, "F") + + s1 = 
Person.objects.defer("gender", "pid").get(pk=s.pk) + s1.name = "Emily" + s1.gender = "M" + + with self.assertNumQueries(1): + s1.save() + + s2 = Person.objects.get(pk=s1.pk) + self.assertEqual(s2.name, "Emily") + self.assertEqual(s2.gender, "M") + + def test_update_fields_only_1(self): + s = Person.objects.create(name="Sara", gender="F") + self.assertEqual(s.gender, "F") + + s1 = Person.objects.only("name").get(pk=s.pk) + s1.name = "Emily" + s1.gender = "M" + + with self.assertNumQueries(1): + s1.save() + + s2 = Person.objects.get(pk=s1.pk) + self.assertEqual(s2.name, "Emily") + self.assertEqual(s2.gender, "M") + + def test_update_fields_only_2(self): + s = Person.objects.create(name="Sara", gender="F", pid=22) + self.assertEqual(s.gender, "F") + + s1 = Person.objects.only("name").get(pk=s.pk) + s1.name = "Emily" + s1.gender = "M" + + with self.assertNumQueries(2): + s1.save(update_fields=["pid"]) + + s2 = Person.objects.get(pk=s1.pk) + self.assertEqual(s2.name, "Sara") + self.assertEqual(s2.gender, "F") + + def test_update_fields_only_repeated(self): + s = Person.objects.create(name="Sara", gender="F") + self.assertEqual(s.gender, "F") + + s1 = Person.objects.only("name").get(pk=s.pk) + s1.gender = "M" + with self.assertNumQueries(1): + s1.save() + # save() should not fetch deferred fields + s1 = Person.objects.only("name").get(pk=s.pk) + with self.assertNumQueries(1): + s1.save() + + def test_update_fields_inheritance_defer(self): + profile_boss = Profile.objects.create(name="Boss", salary=3000) + e1 = Employee.objects.create( + name="Sara", gender="F", employee_num=1, profile=profile_boss + ) + e1 = Employee.objects.only("name").get(pk=e1.pk) + e1.name = "Linda" + with self.assertNumQueries(1): + e1.save() + self.assertEqual(Employee.objects.get(pk=e1.pk).name, "Linda") + + def test_update_fields_fk_defer(self): + profile_boss = Profile.objects.create(name="Boss", salary=3000) + profile_receptionist = Profile.objects.create(name="Receptionist", salary=1000) + e1 
= Employee.objects.create( + name="Sara", gender="F", employee_num=1, profile=profile_boss + ) + e1 = Employee.objects.only("profile").get(pk=e1.pk) + e1.profile = profile_receptionist + with self.assertNumQueries(1): + e1.save() + self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_receptionist) + e1.profile_id = profile_boss.pk + with self.assertNumQueries(1): + e1.save() + self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_boss) + + def test_select_related_only_interaction(self): + profile_boss = Profile.objects.create(name="Boss", salary=3000) + e1 = Employee.objects.create( + name="Sara", gender="F", employee_num=1, profile=profile_boss + ) + e1 = ( + Employee.objects.only("profile__salary") + .select_related("profile") + .get(pk=e1.pk) + ) + profile_boss.name = "Clerk" + profile_boss.salary = 1000 + profile_boss.save() + # The loaded salary of 3000 gets saved, the name of 'Clerk' isn't + # overwritten. + with self.assertNumQueries(1): + e1.profile.save() + reloaded_profile = Profile.objects.get(pk=profile_boss.pk) + self.assertEqual(reloaded_profile.name, profile_boss.name) + self.assertEqual(reloaded_profile.salary, 3000) + + def test_update_fields_m2m(self): + profile_boss = Profile.objects.create(name="Boss", salary=3000) + e1 = Employee.objects.create( + name="Sara", gender="F", employee_num=1, profile=profile_boss + ) + a1 = Account.objects.create(num=1) + a2 = Account.objects.create(num=2) + e1.accounts.set([a1, a2]) + + with self.assertRaisesMessage(ValueError, self.msg % "accounts"): + e1.save(update_fields=["accounts"]) + + def test_update_fields_inheritance(self): + profile_boss = Profile.objects.create(name="Boss", salary=3000) + profile_receptionist = Profile.objects.create(name="Receptionist", salary=1000) + e1 = Employee.objects.create( + name="Sara", gender="F", employee_num=1, profile=profile_boss + ) + + e1.name = "Ian" + e1.gender = "M" + e1.save(update_fields=["name"]) + + e2 = Employee.objects.get(pk=e1.pk) + 
self.assertEqual(e2.name, "Ian") + self.assertEqual(e2.gender, "F") + self.assertEqual(e2.profile, profile_boss) + + e2.profile = profile_receptionist + e2.name = "Sara" + e2.save(update_fields=["profile"]) + + e3 = Employee.objects.get(pk=e1.pk) + self.assertEqual(e3.name, "Ian") + self.assertEqual(e3.profile, profile_receptionist) + + with self.assertNumQueries(1): + e3.profile = profile_boss + e3.save(update_fields=["profile_id"]) + + e4 = Employee.objects.get(pk=e3.pk) + self.assertEqual(e4.profile, profile_boss) + self.assertEqual(e4.profile_id, profile_boss.pk) + + def test_update_fields_inheritance_with_proxy_model(self): + profile_boss = Profile.objects.create(name="Boss", salary=3000) + profile_receptionist = Profile.objects.create(name="Receptionist", salary=1000) + e1 = ProxyEmployee.objects.create( + name="Sara", gender="F", employee_num=1, profile=profile_boss + ) + + e1.name = "Ian" + e1.gender = "M" + e1.save(update_fields=["name"]) + + e2 = ProxyEmployee.objects.get(pk=e1.pk) + self.assertEqual(e2.name, "Ian") + self.assertEqual(e2.gender, "F") + self.assertEqual(e2.profile, profile_boss) + + e2.profile = profile_receptionist + e2.name = "Sara" + e2.save(update_fields=["profile"]) + + e3 = ProxyEmployee.objects.get(pk=e1.pk) + self.assertEqual(e3.name, "Ian") + self.assertEqual(e3.profile, profile_receptionist) + + def test_update_fields_signals(self): + p = Person.objects.create(name="Sara", gender="F") + pre_save_data = [] + + def pre_save_receiver(**kwargs): + pre_save_data.append(kwargs["update_fields"]) + + pre_save.connect(pre_save_receiver) + post_save_data = [] + + def post_save_receiver(**kwargs): + post_save_data.append(kwargs["update_fields"]) + + post_save.connect(post_save_receiver) + p.save(update_fields=["name"]) + self.assertEqual(len(pre_save_data), 1) + self.assertEqual(len(pre_save_data[0]), 1) + self.assertIn("name", pre_save_data[0]) + self.assertEqual(len(post_save_data), 1) + self.assertEqual(len(post_save_data[0]), 1) + 
self.assertIn("name", post_save_data[0]) + + pre_save.disconnect(pre_save_receiver) + post_save.disconnect(post_save_receiver) + + def test_update_fields_incorrect_params(self): + s = Person.objects.create(name="Sara", gender="F") + + with self.assertRaisesMessage(ValueError, self.msg % "first_name"): + s.save(update_fields=["first_name"]) + + # "name" is treated as an iterable so the output is something like + # "n, a, m, e" but the order isn't deterministic. + with self.assertRaisesMessage(ValueError, self.msg % ""): + s.save(update_fields="name") + + def test_empty_update_fields(self): + s = Person.objects.create(name="Sara", gender="F") + pre_save_data = [] + + def pre_save_receiver(**kwargs): + pre_save_data.append(kwargs["update_fields"]) + + pre_save.connect(pre_save_receiver) + post_save_data = [] + + def post_save_receiver(**kwargs): + post_save_data.append(kwargs["update_fields"]) + + post_save.connect(post_save_receiver) + # Save is skipped. + with self.assertNumQueries(0): + s.save(update_fields=[]) + # Signals were skipped, too... + self.assertEqual(len(pre_save_data), 0) + self.assertEqual(len(post_save_data), 0) + + pre_save.disconnect(pre_save_receiver) + post_save.disconnect(post_save_receiver) + + def test_num_queries_inheritance(self): + s = Employee.objects.create(name="Sara", gender="F") + s.employee_num = 1 + s.name = "Emily" + with self.assertNumQueries(1): + s.save(update_fields=["employee_num"]) + s = Employee.objects.get(pk=s.pk) + self.assertEqual(s.employee_num, 1) + self.assertEqual(s.name, "Sara") + s.employee_num = 2 + s.name = "Emily" + with self.assertNumQueries(1): + s.save(update_fields=["name"]) + s = Employee.objects.get(pk=s.pk) + self.assertEqual(s.name, "Emily") + self.assertEqual(s.employee_num, 1) + # A little sanity check that we actually did updates... 
+ self.assertEqual(Employee.objects.count(), 1) + self.assertEqual(Person.objects.count(), 1) + with self.assertNumQueries(2): + s.save(update_fields=["name", "employee_num"]) + + def test_update_non_concrete_field(self): + profile_boss = Profile.objects.create(name="Boss", salary=3000) + with self.assertRaisesMessage(ValueError, self.msg % "non_concrete"): + profile_boss.save(update_fields=["non_concrete"]) diff --git a/testbed/django__django/tests/urlpatterns/__init__.py b/testbed/django__django/tests/urlpatterns/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/urlpatterns/converter_urls.py b/testbed/django__django/tests/urlpatterns/converter_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..0de4b8e5350c5d40ac32bb0d96f75ba6c2cb7733 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/converter_urls.py @@ -0,0 +1,8 @@ +from django.urls import path + +from . 
import views + +urlpatterns = [ + path("{x}/<{x}:{x}>/".format(x=name), views.empty_view, name=name) + for name in ("int", "path", "slug", "str", "uuid") +] diff --git a/testbed/django__django/tests/urlpatterns/converters.py b/testbed/django__django/tests/urlpatterns/converters.py new file mode 100644 index 0000000000000000000000000000000000000000..9e2af062698f72964c82819c476360598f98b754 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/converters.py @@ -0,0 +1,38 @@ +import base64 + + +class Base64Converter: + regex = r"[a-zA-Z0-9+/]*={0,2}" + + def to_python(self, value): + return base64.b64decode(value) + + def to_url(self, value): + return base64.b64encode(value).decode("ascii") + + +class DynamicConverter: + _dynamic_to_python = None + _dynamic_to_url = None + + @property + def regex(self): + return r"[0-9a-zA-Z]+" + + @regex.setter + def regex(self): + raise Exception("You can't modify the regular expression.") + + def to_python(self, value): + return type(self)._dynamic_to_python(value) + + def to_url(self, value): + return type(self)._dynamic_to_url(value) + + @classmethod + def register_to_python(cls, value): + cls._dynamic_to_python = value + + @classmethod + def register_to_url(cls, value): + cls._dynamic_to_url = value diff --git a/testbed/django__django/tests/urlpatterns/included_urls.py b/testbed/django__django/tests/urlpatterns/included_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..957f0e2598123e48bf10e6875f71ebbe4178c813 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/included_urls.py @@ -0,0 +1,8 @@ +from django.urls import include, path + +from . 
import views + +urlpatterns = [ + path("extra//", views.empty_view, name="inner-extra"), + path("", include("urlpatterns.more_urls")), +] diff --git a/testbed/django__django/tests/urlpatterns/more_urls.py b/testbed/django__django/tests/urlpatterns/more_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc3abd51867a235f9bc1fea1ecbaf9d4a07d5b3 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/more_urls.py @@ -0,0 +1,12 @@ +from django.urls import re_path + +from . import views + +urlpatterns = [ + re_path( + r"^more/(?P\w+)/$", + views.empty_view, + {"sub-extra": True}, + name="inner-more", + ), +] diff --git a/testbed/django__django/tests/urlpatterns/path_base64_urls.py b/testbed/django__django/tests/urlpatterns/path_base64_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..3ab14dbb7936f445d55e3544f70cdb664cd9c11f --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/path_base64_urls.py @@ -0,0 +1,27 @@ +from django.urls import include, path, register_converter + +from . 
import converters, views + +register_converter(converters.Base64Converter, "base64") + +subsubpatterns = [ + path("/", views.empty_view, name="subsubpattern-base64"), +] + +subpatterns = [ + path("/", views.empty_view, name="subpattern-base64"), + path( + "/", + include( + (subsubpatterns, "second-layer-namespaced-base64"), "instance-ns-base64" + ), + ), +] + +urlpatterns = [ + path("base64//", views.empty_view, name="base64"), + path("base64//subpatterns/", include(subpatterns)), + path( + "base64//namespaced/", include((subpatterns, "namespaced-base64")) + ), +] diff --git a/testbed/django__django/tests/urlpatterns/path_dynamic_urls.py b/testbed/django__django/tests/urlpatterns/path_dynamic_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..6c3b9cc1c54d38c7b44a6eef43aa415252bff9fe --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/path_dynamic_urls.py @@ -0,0 +1,9 @@ +from django.urls import path, register_converter + +from . import converters, views + +register_converter(converters.DynamicConverter, "dynamic") + +urlpatterns = [ + path("dynamic//", views.empty_view, name="dynamic"), +] diff --git a/testbed/django__django/tests/urlpatterns/path_same_name_urls.py b/testbed/django__django/tests/urlpatterns/path_same_name_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..d7ea5431b1e2e70e97338b78591e99ba67df435e --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/path_same_name_urls.py @@ -0,0 +1,34 @@ +from django.urls import path, re_path, register_converter + +from . import converters, views + +register_converter(converters.DynamicConverter, "to_url_value_error") + +urlpatterns = [ + # Different number of arguments. + path("number_of_args/0/", views.empty_view, name="number_of_args"), + path("number_of_args/1//", views.empty_view, name="number_of_args"), + # Different names of the keyword arguments. 
+ path("kwargs_names/a//", views.empty_view, name="kwargs_names"), + path("kwargs_names/b//", views.empty_view, name="kwargs_names"), + # Different path converters. + path("converter/path//", views.empty_view, name="converter"), + path("converter/str//", views.empty_view, name="converter"), + path("converter/slug//", views.empty_view, name="converter"), + path("converter/int//", views.empty_view, name="converter"), + path("converter/uuid//", views.empty_view, name="converter"), + # Different regular expressions. + re_path(r"^regex/uppercase/([A-Z]+)/", views.empty_view, name="regex"), + re_path(r"^regex/lowercase/([a-z]+)/", views.empty_view, name="regex"), + # converter.to_url() raises ValueError (no match). + path( + "converter_to_url/int//", + views.empty_view, + name="converter_to_url", + ), + path( + "converter_to_url/tiny_int//", + views.empty_view, + name="converter_to_url", + ), +] diff --git a/testbed/django__django/tests/urlpatterns/path_urls.py b/testbed/django__django/tests/urlpatterns/path_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..8a356b546341746ecc10fcd605f72b18fc4ad27d --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/path_urls.py @@ -0,0 +1,39 @@ +from django.urls import include, path, re_path + +from . 
import views + +urlpatterns = [ + path("articles/2003/", views.empty_view, name="articles-2003"), + path("articles//", views.empty_view, name="articles-year"), + path( + "articles///", views.empty_view, name="articles-year-month" + ), + path( + "articles////", + views.empty_view, + name="articles-year-month-day", + ), + path("books/2007/", views.empty_view, {"extra": True}, name="books-2007"), + path( + "books////", + views.empty_view, + {"extra": True}, + name="books-year-month-day", + ), + path("users/", views.empty_view, name="users"), + path("users//", views.empty_view, name="user-with-id"), + path("included_urls/", include("urlpatterns.included_urls")), + re_path(r"^regex/(?P[0-9]+)/$", views.empty_view, name="regex"), + re_path( + r"^regex_optional/(?P\d+)/(?:(?P\d+)/)?", + views.empty_view, + name="regex_optional", + ), + re_path( + r"^regex_only_optional/(?:(?P\d+)/)?", + views.empty_view, + name="regex_only_optional", + ), + path("", include("urlpatterns.more_urls"), {"sub-extra": False}), + path("//", views.empty_view, name="lang-and-path"), +] diff --git a/testbed/django__django/tests/urlpatterns/test_resolvers.py b/testbed/django__django/tests/urlpatterns/test_resolvers.py new file mode 100644 index 0000000000000000000000000000000000000000..cb831bbe1c9b767e307d9c8273b14852aa75b023 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/test_resolvers.py @@ -0,0 +1,23 @@ +from django.test import SimpleTestCase +from django.test.utils import override_settings +from django.urls.resolvers import RegexPattern, RoutePattern, get_resolver +from django.utils.translation import gettext_lazy as _ + + +class RegexPatternTests(SimpleTestCase): + def test_str(self): + self.assertEqual(str(RegexPattern(_("^translated/$"))), "^translated/$") + + +class RoutePatternTests(SimpleTestCase): + def test_str(self): + self.assertEqual(str(RoutePattern(_("translated/"))), "translated/") + + +class ResolverCacheTests(SimpleTestCase): + 
@override_settings(ROOT_URLCONF="urlpatterns.path_urls") + def test_resolver_cache_default__root_urlconf(self): + # resolver for a default URLconf (passing no argument) and for the + # settings.ROOT_URLCONF is the same cached object. + self.assertIs(get_resolver(), get_resolver("urlpatterns.path_urls")) + self.assertIsNot(get_resolver(), get_resolver("urlpatterns.path_dynamic_urls")) diff --git a/testbed/django__django/tests/urlpatterns/tests.py b/testbed/django__django/tests/urlpatterns/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..f8d73fdb4ad3c2f73743138afd1942c7ed4bc3e1 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/tests.py @@ -0,0 +1,417 @@ +import string +import uuid + +from django.core.exceptions import ImproperlyConfigured +from django.test import SimpleTestCase +from django.test.utils import override_settings +from django.urls import NoReverseMatch, Resolver404, path, re_path, resolve, reverse +from django.views import View + +from .converters import DynamicConverter +from .views import empty_view + +included_kwargs = {"base": b"hello", "value": b"world"} +converter_test_data = ( + # ('url', ('url_name', 'app_name', {kwargs})), + # aGVsbG8= is 'hello' encoded in base64. 
+ ("/base64/aGVsbG8=/", ("base64", "", {"value": b"hello"})), + ( + "/base64/aGVsbG8=/subpatterns/d29ybGQ=/", + ("subpattern-base64", "", included_kwargs), + ), + ( + "/base64/aGVsbG8=/namespaced/d29ybGQ=/", + ("subpattern-base64", "namespaced-base64", included_kwargs), + ), +) + + +@override_settings(ROOT_URLCONF="urlpatterns.path_urls") +class SimplifiedURLTests(SimpleTestCase): + def test_path_lookup_without_parameters(self): + match = resolve("/articles/2003/") + self.assertEqual(match.url_name, "articles-2003") + self.assertEqual(match.args, ()) + self.assertEqual(match.kwargs, {}) + self.assertEqual(match.route, "articles/2003/") + self.assertEqual(match.captured_kwargs, {}) + self.assertEqual(match.extra_kwargs, {}) + + def test_path_lookup_with_typed_parameters(self): + match = resolve("/articles/2015/") + self.assertEqual(match.url_name, "articles-year") + self.assertEqual(match.args, ()) + self.assertEqual(match.kwargs, {"year": 2015}) + self.assertEqual(match.route, "articles//") + self.assertEqual(match.captured_kwargs, {"year": 2015}) + self.assertEqual(match.extra_kwargs, {}) + + def test_path_lookup_with_multiple_parameters(self): + match = resolve("/articles/2015/04/12/") + self.assertEqual(match.url_name, "articles-year-month-day") + self.assertEqual(match.args, ()) + self.assertEqual(match.kwargs, {"year": 2015, "month": 4, "day": 12}) + self.assertEqual(match.route, "articles////") + self.assertEqual(match.captured_kwargs, {"year": 2015, "month": 4, "day": 12}) + self.assertEqual(match.extra_kwargs, {}) + + def test_path_lookup_with_multiple_parameters_and_extra_kwarg(self): + match = resolve("/books/2015/04/12/") + self.assertEqual(match.url_name, "books-year-month-day") + self.assertEqual(match.args, ()) + self.assertEqual( + match.kwargs, {"year": 2015, "month": 4, "day": 12, "extra": True} + ) + self.assertEqual(match.route, "books////") + self.assertEqual(match.captured_kwargs, {"year": 2015, "month": 4, "day": 12}) + 
self.assertEqual(match.extra_kwargs, {"extra": True}) + + def test_path_lookup_with_extra_kwarg(self): + match = resolve("/books/2007/") + self.assertEqual(match.url_name, "books-2007") + self.assertEqual(match.args, ()) + self.assertEqual(match.kwargs, {"extra": True}) + self.assertEqual(match.route, "books/2007/") + self.assertEqual(match.captured_kwargs, {}) + self.assertEqual(match.extra_kwargs, {"extra": True}) + + def test_two_variable_at_start_of_path_pattern(self): + match = resolve("/en/foo/") + self.assertEqual(match.url_name, "lang-and-path") + self.assertEqual(match.kwargs, {"lang": "en", "url": "foo"}) + self.assertEqual(match.route, "//") + self.assertEqual(match.captured_kwargs, {"lang": "en", "url": "foo"}) + self.assertEqual(match.extra_kwargs, {}) + + def test_re_path(self): + match = resolve("/regex/1/") + self.assertEqual(match.url_name, "regex") + self.assertEqual(match.kwargs, {"pk": "1"}) + self.assertEqual(match.route, "^regex/(?P[0-9]+)/$") + self.assertEqual(match.captured_kwargs, {"pk": "1"}) + self.assertEqual(match.extra_kwargs, {}) + + def test_re_path_with_optional_parameter(self): + for url, kwargs in ( + ("/regex_optional/1/2/", {"arg1": "1", "arg2": "2"}), + ("/regex_optional/1/", {"arg1": "1"}), + ): + with self.subTest(url=url): + match = resolve(url) + self.assertEqual(match.url_name, "regex_optional") + self.assertEqual(match.kwargs, kwargs) + self.assertEqual( + match.route, + r"^regex_optional/(?P\d+)/(?:(?P\d+)/)?", + ) + self.assertEqual(match.captured_kwargs, kwargs) + self.assertEqual(match.extra_kwargs, {}) + + def test_re_path_with_missing_optional_parameter(self): + match = resolve("/regex_only_optional/") + self.assertEqual(match.url_name, "regex_only_optional") + self.assertEqual(match.kwargs, {}) + self.assertEqual(match.args, ()) + self.assertEqual( + match.route, + r"^regex_only_optional/(?:(?P\d+)/)?", + ) + self.assertEqual(match.captured_kwargs, {}) + self.assertEqual(match.extra_kwargs, {}) + + def 
test_path_lookup_with_inclusion(self): + match = resolve("/included_urls/extra/something/") + self.assertEqual(match.url_name, "inner-extra") + self.assertEqual(match.route, "included_urls/extra//") + + def test_path_lookup_with_empty_string_inclusion(self): + match = resolve("/more/99/") + self.assertEqual(match.url_name, "inner-more") + self.assertEqual(match.route, r"^more/(?P\w+)/$") + self.assertEqual(match.kwargs, {"extra": "99", "sub-extra": True}) + self.assertEqual(match.captured_kwargs, {"extra": "99"}) + self.assertEqual(match.extra_kwargs, {"sub-extra": True}) + + def test_path_lookup_with_double_inclusion(self): + match = resolve("/included_urls/more/some_value/") + self.assertEqual(match.url_name, "inner-more") + self.assertEqual(match.route, r"included_urls/more/(?P\w+)/$") + + def test_path_reverse_without_parameter(self): + url = reverse("articles-2003") + self.assertEqual(url, "/articles/2003/") + + def test_path_reverse_with_parameter(self): + url = reverse( + "articles-year-month-day", kwargs={"year": 2015, "month": 4, "day": 12} + ) + self.assertEqual(url, "/articles/2015/4/12/") + + @override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls") + def test_converter_resolve(self): + for url, (url_name, app_name, kwargs) in converter_test_data: + with self.subTest(url=url): + match = resolve(url) + self.assertEqual(match.url_name, url_name) + self.assertEqual(match.app_name, app_name) + self.assertEqual(match.kwargs, kwargs) + + @override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls") + def test_converter_reverse(self): + for expected, (url_name, app_name, kwargs) in converter_test_data: + if app_name: + url_name = "%s:%s" % (app_name, url_name) + with self.subTest(url=url_name): + url = reverse(url_name, kwargs=kwargs) + self.assertEqual(url, expected) + + @override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls") + def test_converter_reverse_with_second_layer_instance_namespace(self): + kwargs = included_kwargs.copy() + 
kwargs["last_value"] = b"world" + url = reverse("instance-ns-base64:subsubpattern-base64", kwargs=kwargs) + self.assertEqual(url, "/base64/aGVsbG8=/subpatterns/d29ybGQ=/d29ybGQ=/") + + def test_path_inclusion_is_matchable(self): + match = resolve("/included_urls/extra/something/") + self.assertEqual(match.url_name, "inner-extra") + self.assertEqual(match.kwargs, {"extra": "something"}) + + def test_path_inclusion_is_reversible(self): + url = reverse("inner-extra", kwargs={"extra": "something"}) + self.assertEqual(url, "/included_urls/extra/something/") + + def test_invalid_kwargs(self): + msg = "kwargs argument must be a dict, but got str." + with self.assertRaisesMessage(TypeError, msg): + path("hello/", empty_view, "name") + with self.assertRaisesMessage(TypeError, msg): + re_path("^hello/$", empty_view, "name") + + def test_invalid_converter(self): + msg = "URL route 'foo//' uses invalid converter 'nonexistent'." + with self.assertRaisesMessage(ImproperlyConfigured, msg): + path("foo//", empty_view) + + def test_invalid_view(self): + msg = "view must be a callable or a list/tuple in the case of include()." + with self.assertRaisesMessage(TypeError, msg): + path("articles/", "invalid_view") + + def test_invalid_view_instance(self): + class EmptyCBV(View): + pass + + msg = "view must be a callable, pass EmptyCBV.as_view(), not EmptyCBV()." + with self.assertRaisesMessage(TypeError, msg): + path("foo", EmptyCBV()) + + def test_whitespace_in_route(self): + msg = ( + "URL route 'space//extra/' cannot contain " + "whitespace in angle brackets <…>" + ) + for whitespace in string.whitespace: + with self.subTest(repr(whitespace)): + with self.assertRaisesMessage(ImproperlyConfigured, msg % whitespace): + path("space//extra/" % whitespace, empty_view) + # Whitespaces are valid in paths. 
+ p = path("space%s//" % string.whitespace, empty_view) + match = p.resolve("space%s/1/" % string.whitespace) + self.assertEqual(match.kwargs, {"num": 1}) + + def test_path_trailing_newlines(self): + tests = [ + "/articles/2003/\n", + "/articles/2010/\n", + "/en/foo/\n", + "/included_urls/extra/\n", + "/regex/1/\n", + "/users/1/\n", + ] + for url in tests: + with self.subTest(url=url), self.assertRaises(Resolver404): + resolve(url) + + +@override_settings(ROOT_URLCONF="urlpatterns.converter_urls") +class ConverterTests(SimpleTestCase): + def test_matching_urls(self): + def no_converter(x): + return x + + test_data = ( + ("int", {"0", "1", "01", 1234567890}, int), + ("str", {"abcxyz"}, no_converter), + ("path", {"allows.ANY*characters"}, no_converter), + ("slug", {"abcxyz-ABCXYZ_01234567890"}, no_converter), + ("uuid", {"39da9369-838e-4750-91a5-f7805cd82839"}, uuid.UUID), + ) + for url_name, url_suffixes, converter in test_data: + for url_suffix in url_suffixes: + url = "/%s/%s/" % (url_name, url_suffix) + with self.subTest(url=url): + match = resolve(url) + self.assertEqual(match.url_name, url_name) + self.assertEqual(match.kwargs, {url_name: converter(url_suffix)}) + # reverse() works with string parameters. + string_kwargs = {url_name: url_suffix} + self.assertEqual(reverse(url_name, kwargs=string_kwargs), url) + # reverse() also works with native types (int, UUID, etc.). + if converter is not no_converter: + # The converted value might be different for int (a + # leading zero is lost in the conversion). 
+ converted_value = match.kwargs[url_name] + converted_url = "/%s/%s/" % (url_name, converted_value) + self.assertEqual( + reverse(url_name, kwargs={url_name: converted_value}), + converted_url, + ) + + def test_nonmatching_urls(self): + test_data = ( + ("int", {"-1", "letters"}), + ("str", {"", "/"}), + ("path", {""}), + ("slug", {"", "stars*notallowed"}), + ( + "uuid", + { + "", + "9da9369-838e-4750-91a5-f7805cd82839", + "39da9369-838-4750-91a5-f7805cd82839", + "39da9369-838e-475-91a5-f7805cd82839", + "39da9369-838e-4750-91a-f7805cd82839", + "39da9369-838e-4750-91a5-f7805cd8283", + }, + ), + ) + for url_name, url_suffixes in test_data: + for url_suffix in url_suffixes: + url = "/%s/%s/" % (url_name, url_suffix) + with self.subTest(url=url), self.assertRaises(Resolver404): + resolve(url) + + +@override_settings(ROOT_URLCONF="urlpatterns.path_same_name_urls") +class SameNameTests(SimpleTestCase): + def test_matching_urls_same_name(self): + @DynamicConverter.register_to_url + def requires_tiny_int(value): + if value > 5: + raise ValueError + return value + + tests = [ + ( + "number_of_args", + [ + ([], {}, "0/"), + ([1], {}, "1/1/"), + ], + ), + ( + "kwargs_names", + [ + ([], {"a": 1}, "a/1/"), + ([], {"b": 1}, "b/1/"), + ], + ), + ( + "converter", + [ + (["a/b"], {}, "path/a/b/"), + (["a b"], {}, "str/a%20b/"), + (["a-b"], {}, "slug/a-b/"), + (["2"], {}, "int/2/"), + ( + ["39da9369-838e-4750-91a5-f7805cd82839"], + {}, + "uuid/39da9369-838e-4750-91a5-f7805cd82839/", + ), + ], + ), + ( + "regex", + [ + (["ABC"], {}, "uppercase/ABC/"), + (["abc"], {}, "lowercase/abc/"), + ], + ), + ( + "converter_to_url", + [ + ([6], {}, "int/6/"), + ([1], {}, "tiny_int/1/"), + ], + ), + ] + for url_name, cases in tests: + for args, kwargs, url_suffix in cases: + expected_url = "/%s/%s" % (url_name, url_suffix) + with self.subTest(url=expected_url): + self.assertEqual( + reverse(url_name, args=args, kwargs=kwargs), + expected_url, + ) + + +class 
ParameterRestrictionTests(SimpleTestCase): + def test_integer_parameter_name_causes_exception(self): + msg = ( + "URL route 'hello//' uses parameter name '1' which isn't " + "a valid Python identifier." + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + path(r"hello//", lambda r: None) + + def test_non_identifier_parameter_name_causes_exception(self): + msg = ( + "URL route 'b//' uses parameter name 'book.id' which " + "isn't a valid Python identifier." + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + path(r"b//", lambda r: None) + + def test_allows_non_ascii_but_valid_identifiers(self): + # \u0394 is "GREEK CAPITAL LETTER DELTA", a valid identifier. + p = path("hello//", lambda r: None) + match = p.resolve("hello/1/") + self.assertEqual(match.kwargs, {"\u0394": "1"}) + + +@override_settings(ROOT_URLCONF="urlpatterns.path_dynamic_urls") +class ConversionExceptionTests(SimpleTestCase): + """How are errors in Converter.to_python() and to_url() handled?""" + + def test_resolve_value_error_means_no_match(self): + @DynamicConverter.register_to_python + def raises_value_error(value): + raise ValueError() + + with self.assertRaises(Resolver404): + resolve("/dynamic/abc/") + + def test_resolve_type_error_propagates(self): + @DynamicConverter.register_to_python + def raises_type_error(value): + raise TypeError("This type error propagates.") + + with self.assertRaisesMessage(TypeError, "This type error propagates."): + resolve("/dynamic/abc/") + + def test_reverse_value_error_means_no_match(self): + @DynamicConverter.register_to_url + def raises_value_error(value): + raise ValueError + + with self.assertRaises(NoReverseMatch): + reverse("dynamic", kwargs={"value": object()}) + + def test_reverse_type_error_propagates(self): + @DynamicConverter.register_to_url + def raises_type_error(value): + raise TypeError("This type error propagates.") + + with self.assertRaisesMessage(TypeError, "This type error propagates."): + reverse("dynamic", 
kwargs={"value": object()}) diff --git a/testbed/django__django/tests/urlpatterns/views.py b/testbed/django__django/tests/urlpatterns/views.py new file mode 100644 index 0000000000000000000000000000000000000000..3f8e055a5300f8fa808699645de1ef0db3de37af --- /dev/null +++ b/testbed/django__django/tests/urlpatterns/views.py @@ -0,0 +1,5 @@ +from django.http import HttpResponse + + +def empty_view(request, *args, **kwargs): + return HttpResponse() diff --git a/testbed/django__django/tests/urlpatterns_reverse/__init__.py b/testbed/django__django/tests/urlpatterns_reverse/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/urlpatterns_reverse/erroneous_urls.py b/testbed/django__django/tests/urlpatterns_reverse/erroneous_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..24c39a47dc7a66af6215afcbad22990c2e3fc4ca --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/erroneous_urls.py @@ -0,0 +1,7 @@ +from django.urls import re_path + +from . import views + +urlpatterns = [ + re_path(r"(regex_error/$", views.empty_view), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/extra_urls.py b/testbed/django__django/tests/urlpatterns_reverse/extra_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..c356273f7bee0e6d48113cfb69d0946301e63ae2 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/extra_urls.py @@ -0,0 +1,14 @@ +""" +Some extra URL patterns that are included at the top level. 
+""" + +from django.urls import include, path, re_path + +from .views import empty_view + +urlpatterns = [ + re_path("^e-places/([0-9]+)/$", empty_view, name="extra-places"), + re_path(r"^e-people/(?P\w+)/$", empty_view, name="extra-people"), + path("", include("urlpatterns_reverse.included_urls2")), + re_path(r"^prefix/(?P\w+)/", include("urlpatterns_reverse.included_urls2")), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/included_app_urls.py b/testbed/django__django/tests/urlpatterns_reverse/included_app_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..e41007f472305488e9f15b98075fcc64cf7ad818 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/included_app_urls.py @@ -0,0 +1,25 @@ +from django.urls import path, re_path + +from . import views + +app_name = "inc-app" +urlpatterns = [ + path("normal/", views.empty_view, name="inc-normal-view"), + re_path( + "^normal/(?P[0-9]+)/(?P[0-9]+)/$", + views.empty_view, + name="inc-normal-view", + ), + re_path(r"^\+\\\$\*/$", views.empty_view, name="inc-special-view"), + re_path( + "^mixed_args/([0-9]+)/(?P[0-9]+)/$", + views.empty_view, + name="inc-mixed-args", + ), + re_path("^no_kwargs/([0-9]+)/([0-9]+)/$", views.empty_view, name="inc-no-kwargs"), + re_path( + "^view_class/(?P[0-9]+)/(?P[0-9]+)/$", + views.view_class_instance, + name="inc-view-class", + ), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/included_named_urls.py b/testbed/django__django/tests/urlpatterns_reverse/included_named_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..e9fd7b974293f9bd967c463df89bb303dd3ccf68 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/included_named_urls.py @@ -0,0 +1,10 @@ +from django.urls import include, path, re_path + +from .views import empty_view + +urlpatterns = [ + path("", empty_view, name="named-url3"), + re_path(r"^extra/(?P\w+)/$", empty_view, name="named-url4"), + 
re_path(r"^(?P[0-9]+)|(?P[0-9]+)/$", empty_view), + path("included/", include("urlpatterns_reverse.included_named_urls2")), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/included_named_urls2.py b/testbed/django__django/tests/urlpatterns_reverse/included_named_urls2.py new file mode 100644 index 0000000000000000000000000000000000000000..c5e002c67aca3776909a7da92ef166220734b8a1 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/included_named_urls2.py @@ -0,0 +1,9 @@ +from django.urls import path, re_path + +from .views import empty_view + +urlpatterns = [ + path("", empty_view, name="named-url5"), + re_path(r"^extra/(?P\w+)/$", empty_view, name="named-url6"), + re_path(r"^(?P[0-9]+)|(?P[0-9]+)/$", empty_view), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/included_namespace_urls.py b/testbed/django__django/tests/urlpatterns_reverse/included_namespace_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..a120cde188f75944d01dc1b6a7b45d5a68534205 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/included_namespace_urls.py @@ -0,0 +1,39 @@ +from django.urls import include, path, re_path + +from .utils import URLObject +from .views import empty_view, view_class_instance + +testobj3 = URLObject("testapp", "test-ns3") +testobj4 = URLObject("testapp", "test-ns4") + +app_name = "included_namespace_urls" +urlpatterns = [ + path("normal/", empty_view, name="inc-normal-view"), + re_path( + "^normal/(?P[0-9]+)/(?P[0-9]+)/$", + empty_view, + name="inc-normal-view", + ), + re_path(r"^\+\\\$\*/$", empty_view, name="inc-special-view"), + re_path( + "^mixed_args/([0-9]+)/(?P[0-9]+)/$", empty_view, name="inc-mixed-args" + ), + re_path("^no_kwargs/([0-9]+)/([0-9]+)/$", empty_view, name="inc-no-kwargs"), + re_path( + "^view_class/(?P[0-9]+)/(?P[0-9]+)/$", + view_class_instance, + name="inc-view-class", + ), + path("test3/", include(*testobj3.urls)), + path("test4/", 
include(*testobj4.urls)), + path( + "ns-included3/", + include( + ("urlpatterns_reverse.included_urls", "included_urls"), namespace="inc-ns3" + ), + ), + path( + "ns-included4/", + include("urlpatterns_reverse.namespace_urls", namespace="inc-ns4"), + ), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/included_no_kwargs_urls.py b/testbed/django__django/tests/urlpatterns_reverse/included_no_kwargs_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..b51b600fe8b27e6d8d8cf4d88af74f14a43d088e --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/included_no_kwargs_urls.py @@ -0,0 +1,7 @@ +from django.urls import re_path + +from .views import empty_view + +urlpatterns = [ + re_path("^inner-no-kwargs/([0-9]+)/$", empty_view, name="inner-no-kwargs") +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/included_urls.py b/testbed/django__django/tests/urlpatterns_reverse/included_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..9e80cf59522fc9fa21c6e1da94a5e3091aed17e5 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/included_urls.py @@ -0,0 +1,9 @@ +from django.urls import path, re_path + +from .views import empty_view + +urlpatterns = [ + path("", empty_view, name="inner-nothing"), + re_path(r"extra/(?P\w+)/$", empty_view, name="inner-extra"), + re_path(r"(?P[0-9]+)|(?P[0-9]+)/$", empty_view, name="inner-disjunction"), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/included_urls2.py b/testbed/django__django/tests/urlpatterns_reverse/included_urls2.py new file mode 100644 index 0000000000000000000000000000000000000000..5d502383c27b389a1247322812b0b99b2d67513f --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/included_urls2.py @@ -0,0 +1,15 @@ +""" +These URL patterns are included in two different ways in the main urls.py, with +an extra argument present in one case. 
Thus, there are two different ways for +each name to resolve and Django must distinguish the possibilities based on the +argument list. +""" + +from django.urls import re_path + +from .views import empty_view + +urlpatterns = [ + re_path(r"^part/(?P\w+)/$", empty_view, name="part"), + re_path(r"^part2/(?:(?P\w+)/)?$", empty_view, name="part2"), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/method_view_urls.py b/testbed/django__django/tests/urlpatterns_reverse/method_view_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..076748f75bfaf79438be0368bbbb4cecd64d8a30 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/method_view_urls.py @@ -0,0 +1,19 @@ +from django.urls import path + + +class ViewContainer: + def method_view(self, request): + pass + + @classmethod + def classmethod_view(cls, request): + pass + + +view_container = ViewContainer() + + +urlpatterns = [ + path("", view_container.method_view, name="instance-method-url"), + path("", ViewContainer.classmethod_view, name="instance-method-url"), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/middleware.py b/testbed/django__django/tests/urlpatterns_reverse/middleware.py new file mode 100644 index 0000000000000000000000000000000000000000..5fc04056299a9d9788b48373ba8997e20526c98d --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/middleware.py @@ -0,0 +1,41 @@ +from django.http import HttpResponse, StreamingHttpResponse +from django.urls import reverse +from django.utils.deprecation import MiddlewareMixin + +from . 
import urlconf_inner + + +class ChangeURLconfMiddleware(MiddlewareMixin): + def process_request(self, request): + request.urlconf = urlconf_inner.__name__ + + +class NullChangeURLconfMiddleware(MiddlewareMixin): + def process_request(self, request): + request.urlconf = None + + +class ReverseInnerInResponseMiddleware(MiddlewareMixin): + def process_response(self, *args, **kwargs): + return HttpResponse(reverse("inner")) + + +class ReverseOuterInResponseMiddleware(MiddlewareMixin): + def process_response(self, *args, **kwargs): + return HttpResponse(reverse("outer")) + + +class ReverseInnerInStreaming(MiddlewareMixin): + def process_view(self, *args, **kwargs): + def stream(): + yield reverse("inner") + + return StreamingHttpResponse(stream()) + + +class ReverseOuterInStreaming(MiddlewareMixin): + def process_view(self, *args, **kwargs): + def stream(): + yield reverse("outer") + + return StreamingHttpResponse(stream()) diff --git a/testbed/django__django/tests/urlpatterns_reverse/named_urls.py b/testbed/django__django/tests/urlpatterns_reverse/named_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..af0d69dbab97ffdfba639d9931df36b9ce137e2f --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/named_urls.py @@ -0,0 +1,10 @@ +from django.urls import include, path, re_path + +from .views import empty_view + +urlpatterns = [ + path("", empty_view, name="named-url1"), + re_path(r"^extra/(?P\w+)/$", empty_view, name="named-url2"), + re_path(r"^(?P[0-9]+)|(?P[0-9]+)/$", empty_view), + path("included/", include("urlpatterns_reverse.included_named_urls")), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/named_urls_conflict.py b/testbed/django__django/tests/urlpatterns_reverse/named_urls_conflict.py new file mode 100644 index 0000000000000000000000000000000000000000..982dd4f157f82c03c033699684b4a56e50141f12 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/named_urls_conflict.py @@ -0,0 +1,25 @@ 
+from django.urls import path, re_path + +from .views import empty_view + +urlpatterns = [ + # No kwargs + path("conflict/cannot-go-here/", empty_view, name="name-conflict"), + path("conflict/", empty_view, name="name-conflict"), + # One kwarg + re_path(r"^conflict-first/(?P\w+)/$", empty_view, name="name-conflict"), + re_path( + r"^conflict-cannot-go-here/(?P\w+)/$", empty_view, name="name-conflict" + ), + re_path(r"^conflict-middle/(?P\w+)/$", empty_view, name="name-conflict"), + re_path(r"^conflict-last/(?P\w+)/$", empty_view, name="name-conflict"), + # Two kwargs + re_path( + r"^conflict/(?P\w+)/(?P\w+)/cannot-go-here/$", + empty_view, + name="name-conflict", + ), + re_path( + r"^conflict/(?P\w+)/(?P\w+)/$", empty_view, name="name-conflict" + ), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/namespace_urls.py b/testbed/django__django/tests/urlpatterns_reverse/namespace_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..4a39ab22901521471a29af28da57d88a0aabc527 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/namespace_urls.py @@ -0,0 +1,87 @@ +from django.urls import include, path, re_path + +from . 
import views +from .utils import URLObject + +testobj1 = URLObject("testapp", "test-ns1") +testobj2 = URLObject("testapp", "test-ns2") +default_testobj = URLObject("testapp", "testapp") + +otherobj1 = URLObject("nodefault", "other-ns1") +otherobj2 = URLObject("nodefault", "other-ns2") + +newappobj1 = URLObject("newapp") + +app_name = "namespace_urls" +urlpatterns = [ + path("normal/", views.empty_view, name="normal-view"), + re_path( + r"^normal/(?P[0-9]+)/(?P[0-9]+)/$", + views.empty_view, + name="normal-view", + ), + path("resolver_match/", views.pass_resolver_match_view, name="test-resolver-match"), + re_path(r"^\+\\\$\*/$", views.empty_view, name="special-view"), + re_path( + r"^mixed_args/([0-9]+)/(?P[0-9]+)/$", + views.empty_view, + {"extra": True}, + name="mixed-args", + ), + re_path(r"^no_kwargs/([0-9]+)/([0-9]+)/$", views.empty_view, name="no-kwargs"), + re_path( + r"^view_class/(?P[0-9]+)/(?P[0-9]+)/$", + views.view_class_instance, + name="view-class", + ), + re_path(r"^unnamed/normal/(?P[0-9]+)/(?P[0-9]+)/$", views.empty_view), + re_path( + r"^unnamed/view_class/(?P[0-9]+)/(?P[0-9]+)/$", + views.view_class_instance, + ), + path("test1/", include(*testobj1.urls)), + path("test2/", include(*testobj2.urls)), + path("default/", include(*default_testobj.urls)), + path("other1/", include(*otherobj1.urls)), + re_path(r"^other[246]/", include(*otherobj2.urls)), + path("newapp1/", include(newappobj1.app_urls, "new-ns1")), + path("new-default/", include(newappobj1.app_urls)), + re_path( + r"^app-included[135]/", + include("urlpatterns_reverse.included_app_urls", namespace="app-ns1"), + ), + path( + "app-included2/", + include("urlpatterns_reverse.included_app_urls", namespace="app-ns2"), + ), + re_path( + r"^ns-included[135]/", + include("urlpatterns_reverse.included_namespace_urls", namespace="inc-ns1"), + ), + path( + "ns-included2/", + include("urlpatterns_reverse.included_namespace_urls", namespace="inc-ns2"), + ), + path( + "app-included/", + 
include("urlpatterns_reverse.included_namespace_urls", "inc-app"), + ), + path("included/", include("urlpatterns_reverse.included_namespace_urls")), + re_path( + r"^inc(?P[0-9]+)/", + include( + ("urlpatterns_reverse.included_urls", "included_urls"), namespace="inc-ns5" + ), + ), + re_path( + r"^included/([0-9]+)/", include("urlpatterns_reverse.included_namespace_urls") + ), + re_path( + r"^ns-outer/(?P[0-9]+)/", + include("urlpatterns_reverse.included_namespace_urls", namespace="inc-outer"), + ), + re_path( + r"^\+\\\$\*/", + include("urlpatterns_reverse.namespace_urls", namespace="special"), + ), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/nested_urls.py b/testbed/django__django/tests/urlpatterns_reverse/nested_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..0986759a682ee55d4f96b0667358af204cd1b07f --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/nested_urls.py @@ -0,0 +1,28 @@ +from django.urls import include, path +from django.views import View + + +def view1(request): + pass + + +def view2(request): + pass + + +class View3(View): + pass + + +nested = ( + [ + path("view1/", view1, name="view1"), + path("view3/", View3.as_view(), name="view3"), + ], + "backend", +) + +urlpatterns = [ + path("some/path/", include(nested, namespace="nested")), + path("view2/", view2, name="view2"), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/no_urls.py b/testbed/django__django/tests/urlpatterns_reverse/no_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/urlpatterns_reverse/nonimported_module.py b/testbed/django__django/tests/urlpatterns_reverse/nonimported_module.py new file mode 100644 index 0000000000000000000000000000000000000000..df046333d3ac63fddd68a1a4e83ed6d22e5470a8 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/nonimported_module.py @@ -0,0 +1,3 @@ 
+def view(request): + """Stub view""" + pass diff --git a/testbed/django__django/tests/urlpatterns_reverse/reverse_lazy_urls.py b/testbed/django__django/tests/urlpatterns_reverse/reverse_lazy_urls.py new file mode 100644 index 0000000000000000000000000000000000000000..049c2e3ee8cf2570f67007d3ebe381a5463df890 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/reverse_lazy_urls.py @@ -0,0 +1,10 @@ +from django.urls import path + +from .views import LazyRedirectView, empty_view, login_required_view + +urlpatterns = [ + path("redirected_to/", empty_view, name="named-lazy-url-redirected-to"), + path("login/", empty_view, name="some-login-page"), + path("login_required_view/", login_required_view), + path("redirect/", LazyRedirectView.as_view()), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/test_localeregexdescriptor.py b/testbed/django__django/tests/urlpatterns_reverse/test_localeregexdescriptor.py new file mode 100644 index 0000000000000000000000000000000000000000..3246329432bc6ce9cbfa7270cfde929a18224c22 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/test_localeregexdescriptor.py @@ -0,0 +1,62 @@ +import os +from pathlib import Path +from unittest import mock + +from django.core.exceptions import ImproperlyConfigured +from django.test import SimpleTestCase, override_settings +from django.urls.resolvers import LocaleRegexDescriptor, RegexPattern +from django.utils import translation + +here = os.path.dirname(os.path.abspath(__file__)) + + +@override_settings(LOCALE_PATHS=[os.path.join(here, "translations", "locale")]) +class LocaleRegexDescriptorTests(SimpleTestCase): + def setUp(self): + translation.trans_real._translations = {} + + def tearDown(self): + translation.trans_real._translations = {} + + def test_translated_regex_compiled_per_language(self): + provider = RegexPattern(translation.gettext_lazy("^foo/$")) + with translation.override("de"): + de_compiled = provider.regex + # compiled only once per 
language + error = AssertionError( + "tried to compile url regex twice for the same language" + ) + with mock.patch("django.urls.resolvers.re.compile", side_effect=error): + de_compiled_2 = provider.regex + with translation.override("fr"): + fr_compiled = provider.regex + self.assertEqual(fr_compiled.pattern, "^foo-fr/$") + self.assertEqual(de_compiled.pattern, "^foo-de/$") + self.assertEqual(de_compiled, de_compiled_2) + + def test_nontranslated_regex_compiled_once(self): + provider = RegexPattern("^foo/$") + with translation.override("de"): + de_compiled = provider.regex + with translation.override("fr"): + # compiled only once, regardless of language + error = AssertionError("tried to compile non-translated url regex twice") + with mock.patch("django.urls.resolvers.re.compile", side_effect=error): + fr_compiled = provider.regex + self.assertEqual(de_compiled.pattern, "^foo/$") + self.assertEqual(fr_compiled.pattern, "^foo/$") + + def test_regex_compile_error(self): + """Regex errors are re-raised as ImproperlyConfigured.""" + provider = RegexPattern("*") + msg = '"*" is not a valid regular expression: nothing to repeat' + with self.assertRaisesMessage(ImproperlyConfigured, msg): + provider.regex + + def test_access_locale_regex_descriptor(self): + self.assertIsInstance(RegexPattern.regex, LocaleRegexDescriptor) + + +@override_settings(LOCALE_PATHS=[Path(here) / "translations" / "locale"]) +class LocaleRegexDescriptorPathLibTests(LocaleRegexDescriptorTests): + pass diff --git a/testbed/django__django/tests/urlpatterns_reverse/tests.py b/testbed/django__django/tests/urlpatterns_reverse/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..89dfd0deba4fd5c7b8b95eb092f4880733da4b66 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/tests.py @@ -0,0 +1,1785 @@ +""" +Unit tests for reverse URL lookups. 
+""" +import pickle +import sys +import threading + +from admin_scripts.tests import AdminScriptTestCase + +from django.conf import settings +from django.contrib.auth.models import User +from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist +from django.http import HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect +from django.shortcuts import redirect +from django.test import RequestFactory, SimpleTestCase, TestCase, override_settings +from django.test.utils import override_script_prefix +from django.urls import ( + NoReverseMatch, + Resolver404, + ResolverMatch, + URLPattern, + URLResolver, + get_callable, + get_resolver, + get_urlconf, + include, + path, + re_path, + resolve, + reverse, + reverse_lazy, +) +from django.urls.resolvers import RegexPattern + +from . import middleware, urlconf_outer, views +from .utils import URLObject +from .views import empty_view + +resolve_test_data = ( + # These entries are in the format: + # (path, url_name, app_name, namespace, view_name, func, args, kwargs) + # Simple case + ( + "/normal/42/37/", + "normal-view", + "", + "", + "normal-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/view_class/42/37/", + "view-class", + "", + "", + "view-class", + views.view_class_instance, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/included/normal/42/37/", + "inc-normal-view", + "included_namespace_urls", + "included_namespace_urls", + "included_namespace_urls:inc-normal-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/included/view_class/42/37/", + "inc-view-class", + "included_namespace_urls", + "included_namespace_urls", + "included_namespace_urls:inc-view-class", + views.view_class_instance, + (), + {"arg1": "42", "arg2": "37"}, + ), + # Unnamed args are dropped if you have *any* kwargs in a pattern + ( + "/mixed_args/42/37/", + "mixed-args", + "", + "", + "mixed-args", + views.empty_view, + (), + {"extra": True, "arg2": "37"}, + ), + ( + 
"/included/mixed_args/42/37/", + "inc-mixed-args", + "included_namespace_urls", + "included_namespace_urls", + "included_namespace_urls:inc-mixed-args", + views.empty_view, + (), + {"arg2": "37"}, + ), + ( + "/included/12/mixed_args/42/37/", + "inc-mixed-args", + "included_namespace_urls", + "included_namespace_urls", + "included_namespace_urls:inc-mixed-args", + views.empty_view, + (), + {"arg2": "37"}, + ), + # Unnamed views should have None as the url_name. Regression data for #21157. + ( + "/unnamed/normal/42/37/", + None, + "", + "", + "urlpatterns_reverse.views.empty_view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/unnamed/view_class/42/37/", + None, + "", + "", + "urlpatterns_reverse.views.ViewClass", + views.view_class_instance, + (), + {"arg1": "42", "arg2": "37"}, + ), + # If you have no kwargs, you get an args list. + ( + "/no_kwargs/42/37/", + "no-kwargs", + "", + "", + "no-kwargs", + views.empty_view, + ("42", "37"), + {}, + ), + ( + "/included/no_kwargs/42/37/", + "inc-no-kwargs", + "included_namespace_urls", + "included_namespace_urls", + "included_namespace_urls:inc-no-kwargs", + views.empty_view, + ("42", "37"), + {}, + ), + ( + "/included/12/no_kwargs/42/37/", + "inc-no-kwargs", + "included_namespace_urls", + "included_namespace_urls", + "included_namespace_urls:inc-no-kwargs", + views.empty_view, + ("12", "42", "37"), + {}, + ), + # Namespaces + ( + "/test1/inner/42/37/", + "urlobject-view", + "testapp", + "test-ns1", + "test-ns1:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/included/test3/inner/42/37/", + "urlobject-view", + "included_namespace_urls:testapp", + "included_namespace_urls:test-ns3", + "included_namespace_urls:test-ns3:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/ns-included1/normal/42/37/", + "inc-normal-view", + "included_namespace_urls", + "inc-ns1", + "inc-ns1:inc-normal-view", + views.empty_view, + (), + {"arg1": 
"42", "arg2": "37"}, + ), + ( + "/included/test3/inner/42/37/", + "urlobject-view", + "included_namespace_urls:testapp", + "included_namespace_urls:test-ns3", + "included_namespace_urls:test-ns3:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/default/inner/42/37/", + "urlobject-view", + "testapp", + "testapp", + "testapp:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/other2/inner/42/37/", + "urlobject-view", + "nodefault", + "other-ns2", + "other-ns2:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/other1/inner/42/37/", + "urlobject-view", + "nodefault", + "other-ns1", + "other-ns1:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + # Nested namespaces + ( + "/ns-included1/test3/inner/42/37/", + "urlobject-view", + "included_namespace_urls:testapp", + "inc-ns1:test-ns3", + "inc-ns1:test-ns3:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/ns-included1/ns-included4/ns-included2/test3/inner/42/37/", + "urlobject-view", + "included_namespace_urls:namespace_urls:included_namespace_urls:testapp", + "inc-ns1:inc-ns4:inc-ns2:test-ns3", + "inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/app-included/test3/inner/42/37/", + "urlobject-view", + "included_namespace_urls:testapp", + "inc-app:test-ns3", + "inc-app:test-ns3:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + ( + "/app-included/ns-included4/ns-included2/test3/inner/42/37/", + "urlobject-view", + "included_namespace_urls:namespace_urls:included_namespace_urls:testapp", + "inc-app:inc-ns4:inc-ns2:test-ns3", + "inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view", + views.empty_view, + (), + {"arg1": "42", "arg2": "37"}, + ), + # Namespaces capturing variables + ( + "/inc70/", + "inner-nothing", + "included_urls", + "inc-ns5", + 
"inc-ns5:inner-nothing", + views.empty_view, + (), + {"outer": "70"}, + ), + ( + "/inc78/extra/foobar/", + "inner-extra", + "included_urls", + "inc-ns5", + "inc-ns5:inner-extra", + views.empty_view, + (), + {"outer": "78", "extra": "foobar"}, + ), +) + +test_data = ( + ("places", "/places/3/", [3], {}), + ("places", "/places/3/", ["3"], {}), + ("places", NoReverseMatch, ["a"], {}), + ("places", NoReverseMatch, [], {}), + ("places?", "/place/", [], {}), + ("places+", "/places/", [], {}), + ("places*", "/place/", [], {}), + ("places2?", "/", [], {}), + ("places2+", "/places/", [], {}), + ("places2*", "/", [], {}), + ("places3", "/places/4/", [4], {}), + ("places3", "/places/harlem/", ["harlem"], {}), + ("places3", NoReverseMatch, ["harlem64"], {}), + ("places4", "/places/3/", [], {"id": 3}), + ("people", NoReverseMatch, [], {}), + ("people", "/people/adrian/", ["adrian"], {}), + ("people", "/people/adrian/", [], {"name": "adrian"}), + ("people", NoReverseMatch, ["name with spaces"], {}), + ("people", NoReverseMatch, [], {"name": "name with spaces"}), + ("people2", "/people/name/", [], {}), + ("people2a", "/people/name/fred/", ["fred"], {}), + ("people_backref", "/people/nate-nate/", ["nate"], {}), + ("people_backref", "/people/nate-nate/", [], {"name": "nate"}), + ("optional", "/optional/fred/", [], {"name": "fred"}), + ("optional", "/optional/fred/", ["fred"], {}), + ("named_optional", "/optional/1/", [1], {}), + ("named_optional", "/optional/1/", [], {"arg1": 1}), + ("named_optional", "/optional/1/2/", [1, 2], {}), + ("named_optional", "/optional/1/2/", [], {"arg1": 1, "arg2": 2}), + ("named_optional_terminated", "/optional/1/", [1], {}), + ("named_optional_terminated", "/optional/1/", [], {"arg1": 1}), + ("named_optional_terminated", "/optional/1/2/", [1, 2], {}), + ("named_optional_terminated", "/optional/1/2/", [], {"arg1": 1, "arg2": 2}), + ("hardcoded", "/hardcoded/", [], {}), + ("hardcoded2", "/hardcoded/doc.pdf", [], {}), + ("people3", "/people/il/adrian/", 
[], {"state": "il", "name": "adrian"}), + ("people3", NoReverseMatch, [], {"state": "il"}), + ("people3", NoReverseMatch, [], {"name": "adrian"}), + ("people4", NoReverseMatch, [], {"state": "il", "name": "adrian"}), + ("people6", "/people/il/test/adrian/", ["il/test", "adrian"], {}), + ("people6", "/people//adrian/", ["adrian"], {}), + ("range", "/character_set/a/", [], {}), + ("range2", "/character_set/x/", [], {}), + ("price", "/price/$10/", ["10"], {}), + ("price2", "/price/$10/", ["10"], {}), + ("price3", "/price/$10/", ["10"], {}), + ( + "product", + "/product/chocolate+($2.00)/", + [], + {"price": "2.00", "product": "chocolate"}, + ), + ("headlines", "/headlines/2007.5.21/", [], {"year": 2007, "month": 5, "day": 21}), + ( + "windows", + r"/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/", + [], + {"drive_name": "C", "path": r"Documents and Settings\spam"}, + ), + ("special", r"/special_chars/~@+%5C$*%7C/", [r"~@+\$*|"], {}), + ("special", r"/special_chars/some%20resource/", [r"some resource"], {}), + ("special", r"/special_chars/10%25%20complete/", [r"10% complete"], {}), + ("special", r"/special_chars/some%20resource/", [], {"chars": r"some resource"}), + ("special", r"/special_chars/10%25%20complete/", [], {"chars": r"10% complete"}), + ("special", NoReverseMatch, [""], {}), + ("mixed", "/john/0/", [], {"name": "john"}), + ("repeats", "/repeats/a/", [], {}), + ("repeats2", "/repeats/aa/", [], {}), + ("repeats3", "/repeats/aa/", [], {}), + ("test", "/test/1", [], {}), + ("inner-nothing", "/outer/42/", [], {"outer": "42"}), + ("inner-nothing", "/outer/42/", ["42"], {}), + ("inner-nothing", NoReverseMatch, ["foo"], {}), + ("inner-extra", "/outer/42/extra/inner/", [], {"extra": "inner", "outer": "42"}), + ("inner-extra", "/outer/42/extra/inner/", ["42", "inner"], {}), + ("inner-extra", NoReverseMatch, ["fred", "inner"], {}), + ("inner-no-kwargs", "/outer-no-kwargs/42/inner-no-kwargs/1/", ["42", "1"], {}), + ("disjunction", NoReverseMatch, ["foo"], {}), + 
("inner-disjunction", NoReverseMatch, ["10", "11"], {}), + ("extra-places", "/e-places/10/", ["10"], {}), + ("extra-people", "/e-people/fred/", ["fred"], {}), + ("extra-people", "/e-people/fred/", [], {"name": "fred"}), + ("part", "/part/one/", [], {"value": "one"}), + ("part", "/prefix/xx/part/one/", [], {"value": "one", "prefix": "xx"}), + ("part2", "/part2/one/", [], {"value": "one"}), + ("part2", "/part2/", [], {}), + ("part2", "/prefix/xx/part2/one/", [], {"value": "one", "prefix": "xx"}), + ("part2", "/prefix/xx/part2/", [], {"prefix": "xx"}), + # Tests for nested groups. Nested capturing groups will only work if you + # *only* supply the correct outer group. + ("nested-noncapture", "/nested/noncapture/opt", [], {"p": "opt"}), + ("nested-capture", "/nested/capture/opt/", ["opt/"], {}), + ("nested-capture", NoReverseMatch, [], {"p": "opt"}), + ("nested-mixedcapture", "/nested/capture/mixed/opt", ["opt"], {}), + ("nested-mixedcapture", NoReverseMatch, [], {"p": "opt"}), + ("nested-namedcapture", "/nested/capture/named/opt/", [], {"outer": "opt/"}), + ("nested-namedcapture", NoReverseMatch, [], {"outer": "opt/", "inner": "opt"}), + ("nested-namedcapture", NoReverseMatch, [], {"inner": "opt"}), + ("non_path_include", "/includes/non_path_include/", [], {}), + # Tests for #13154 + ("defaults", "/defaults_view1/3/", [], {"arg1": 3, "arg2": 1}), + ("defaults", "/defaults_view2/3/", [], {"arg1": 3, "arg2": 2}), + ("defaults", NoReverseMatch, [], {"arg1": 3, "arg2": 3}), + ("defaults", NoReverseMatch, [], {"arg2": 1}), + # Security tests + ("security", "/%2Fexample.com/security/", ["/example.com"], {}), +) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.no_urls") +class NoURLPatternsTests(SimpleTestCase): + def test_no_urls_exception(self): + """ + URLResolver should raise an exception when no urlpatterns exist. 
+ """ + resolver = URLResolver(RegexPattern(r"^$"), settings.ROOT_URLCONF) + + with self.assertRaisesMessage( + ImproperlyConfigured, + "The included URLconf 'urlpatterns_reverse.no_urls' does not " + "appear to have any patterns in it. If you see the 'urlpatterns' " + "variable with valid patterns in the file then the issue is " + "probably caused by a circular import.", + ): + getattr(resolver, "url_patterns") + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls") +class URLPatternReverse(SimpleTestCase): + def test_urlpattern_reverse(self): + for name, expected, args, kwargs in test_data: + with self.subTest(name=name, args=args, kwargs=kwargs): + try: + got = reverse(name, args=args, kwargs=kwargs) + except NoReverseMatch: + self.assertEqual(NoReverseMatch, expected) + else: + self.assertEqual(got, expected) + + def test_reverse_none(self): + # Reversing None should raise an error, not return the last un-named view. + with self.assertRaises(NoReverseMatch): + reverse(None) + + def test_mixing_args_and_kwargs(self): + msg = "Don't mix *args and **kwargs in call to reverse()!" 
+ with self.assertRaisesMessage(ValueError, msg): + reverse("name", args=["a"], kwargs={"b": "c"}) + + @override_script_prefix("/{{invalid}}/") + def test_prefix_braces(self): + self.assertEqual( + "/%7B%7Binvalid%7D%7D/includes/non_path_include/", + reverse("non_path_include"), + ) + + def test_prefix_parenthesis(self): + # Parentheses are allowed and should not cause errors or be escaped + with override_script_prefix("/bogus)/"): + self.assertEqual( + "/bogus)/includes/non_path_include/", reverse("non_path_include") + ) + with override_script_prefix("/(bogus)/"): + self.assertEqual( + "/(bogus)/includes/non_path_include/", reverse("non_path_include") + ) + + @override_script_prefix("/bump%20map/") + def test_prefix_format_char(self): + self.assertEqual( + "/bump%2520map/includes/non_path_include/", reverse("non_path_include") + ) + + @override_script_prefix("/%7Eme/") + def test_non_urlsafe_prefix_with_args(self): + # Regression for #20022, adjusted for #24013 because ~ is an unreserved + # character. Tests whether % is escaped. + self.assertEqual("/%257Eme/places/1/", reverse("places", args=[1])) + + def test_patterns_reported(self): + # Regression for #17076 + with self.assertRaisesMessage( + NoReverseMatch, r"1 pattern(s) tried: ['people/(?P\\w+)/$']" + ): + # this url exists, but requires an argument + reverse("people", args=[]) + + @override_script_prefix("/script:name/") + def test_script_name_escaping(self): + self.assertEqual( + reverse("optional", args=["foo:bar"]), "/script:name/optional/foo:bar/" + ) + + def test_view_not_found_message(self): + msg = ( + "Reverse for 'nonexistent-view' not found. 'nonexistent-view' " + "is not a valid view function or pattern name." + ) + with self.assertRaisesMessage(NoReverseMatch, msg): + reverse("nonexistent-view") + + def test_no_args_message(self): + msg = "Reverse for 'places' with no arguments not found. 
1 pattern(s) tried:" + with self.assertRaisesMessage(NoReverseMatch, msg): + reverse("places") + + def test_illegal_args_message(self): + msg = ( + "Reverse for 'places' with arguments '(1, 2)' not found. 1 pattern(s) " + "tried:" + ) + with self.assertRaisesMessage(NoReverseMatch, msg): + reverse("places", args=(1, 2)) + + def test_illegal_kwargs_message(self): + msg = ( + "Reverse for 'places' with keyword arguments '{'arg1': 2}' not found. 1 " + "pattern(s) tried:" + ) + with self.assertRaisesMessage(NoReverseMatch, msg): + reverse("places", kwargs={"arg1": 2}) + + +class ResolverTests(SimpleTestCase): + def test_resolver_repr(self): + """ + Test repr of URLResolver, especially when urlconf_name is a list + (#17892). + """ + # Pick a resolver from a namespaced URLconf + resolver = get_resolver("urlpatterns_reverse.namespace_urls") + sub_resolver = resolver.namespace_dict["test-ns1"][1] + self.assertIn("", repr(sub_resolver)) + + def test_reverse_lazy_object_coercion_by_resolve(self): + """ + Verifies lazy object returned by reverse_lazy is coerced to + text by resolve(). Previous to #21043, this would raise a TypeError. + """ + urls = "urlpatterns_reverse.named_urls" + proxy_url = reverse_lazy("named-url1", urlconf=urls) + resolver = get_resolver(urls) + resolver.resolve(proxy_url) + + def test_resolver_reverse(self): + resolver = get_resolver("urlpatterns_reverse.named_urls") + test_urls = [ + # (name, args, kwargs, expected) + ("named-url1", (), {}, ""), + ("named-url2", ("arg",), {}, "extra/arg/"), + ("named-url2", (), {"extra": "arg"}, "extra/arg/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(resolver.reverse(name, *args, **kwargs), expected) + + def test_resolver_reverse_conflict(self): + """ + URL pattern name arguments don't need to be unique. The last registered + pattern takes precedence for conflicting names. 
+ """ + resolver = get_resolver("urlpatterns_reverse.named_urls_conflict") + test_urls = [ + # (name, args, kwargs, expected) + # Without arguments, the last URL in urlpatterns has precedence. + ("name-conflict", (), {}, "conflict/"), + # With an arg, the last URL in urlpatterns has precedence. + ("name-conflict", ("arg",), {}, "conflict-last/arg/"), + # With a kwarg, other URL patterns can be reversed. + ("name-conflict", (), {"first": "arg"}, "conflict-first/arg/"), + ("name-conflict", (), {"middle": "arg"}, "conflict-middle/arg/"), + ("name-conflict", (), {"last": "arg"}, "conflict-last/arg/"), + # The number and order of the arguments don't interfere with reversing. + ("name-conflict", ("arg", "arg"), {}, "conflict/arg/arg/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(resolver.reverse(name, *args, **kwargs), expected) + + def test_non_regex(self): + """ + A Resolver404 is raised if resolving doesn't meet the basic + requirements of a path to match - i.e., at the very least, it matches + the root pattern '^/'. Never return None from resolve() to prevent a + TypeError from occurring later (#10834). + """ + test_urls = ["", "a", "\\", "."] + for path_ in test_urls: + with self.subTest(path=path_): + with self.assertRaises(Resolver404): + resolve(path_) + + def test_404_tried_urls_have_names(self): + """ + The list of URLs that come back from a Resolver404 exception contains + a list in the right format for printing out in the DEBUG 404 page with + both the patterns and URL names, if available. 
+ """ + urls = "urlpatterns_reverse.named_urls" + # this list matches the expected URL types and names returned when + # you try to resolve a nonexistent URL in the first level of included + # URLs in named_urls.py (e.g., '/included/nonexistent-url') + url_types_names = [ + [{"type": URLPattern, "name": "named-url1"}], + [{"type": URLPattern, "name": "named-url2"}], + [{"type": URLPattern, "name": None}], + [{"type": URLResolver}, {"type": URLPattern, "name": "named-url3"}], + [{"type": URLResolver}, {"type": URLPattern, "name": "named-url4"}], + [{"type": URLResolver}, {"type": URLPattern, "name": None}], + [{"type": URLResolver}, {"type": URLResolver}], + ] + with self.assertRaisesMessage(Resolver404, "tried") as cm: + resolve("/included/nonexistent-url", urlconf=urls) + e = cm.exception + # make sure we at least matched the root ('/') url resolver: + self.assertIn("tried", e.args[0]) + self.assertEqual( + len(e.args[0]["tried"]), + len(url_types_names), + "Wrong number of tried URLs returned. Expected %s, got %s." + % (len(url_types_names), len(e.args[0]["tried"])), + ) + for tried, expected in zip(e.args[0]["tried"], url_types_names): + for t, e in zip(tried, expected): + with self.subTest(t): + self.assertIsInstance( + t, e["type"] + ), "%s is not an instance of %s" % (t, e["type"]) + if "name" in e: + if not e["name"]: + self.assertIsNone( + t.name, "Expected no URL name but found %s." % t.name + ) + else: + self.assertEqual( + t.name, + e["name"], + 'Wrong URL name. Expected "%s", got "%s".' 
+ % (e["name"], t.name), + ) + + def test_namespaced_view_detail(self): + resolver = get_resolver("urlpatterns_reverse.nested_urls") + self.assertTrue(resolver._is_callback("urlpatterns_reverse.nested_urls.view1")) + self.assertTrue(resolver._is_callback("urlpatterns_reverse.nested_urls.view2")) + self.assertTrue(resolver._is_callback("urlpatterns_reverse.nested_urls.View3")) + self.assertFalse(resolver._is_callback("urlpatterns_reverse.nested_urls.blub")) + + def test_view_detail_as_method(self): + # Views which have a class name as part of their path. + resolver = get_resolver("urlpatterns_reverse.method_view_urls") + self.assertTrue( + resolver._is_callback( + "urlpatterns_reverse.method_view_urls.ViewContainer.method_view" + ) + ) + self.assertTrue( + resolver._is_callback( + "urlpatterns_reverse.method_view_urls.ViewContainer.classmethod_view" + ) + ) + + def test_populate_concurrency(self): + """ + URLResolver._populate() can be called concurrently, but not more + than once per thread (#26888). 
+ """ + resolver = URLResolver(RegexPattern(r"^/"), "urlpatterns_reverse.urls") + resolver._local.populating = True + thread = threading.Thread(target=resolver._populate) + thread.start() + thread.join() + self.assertNotEqual(resolver._reverse_dict, {}) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.reverse_lazy_urls") +class ReverseLazyTest(TestCase): + def test_redirect_with_lazy_reverse(self): + response = self.client.get("/redirect/") + self.assertRedirects(response, "/redirected_to/", status_code=302) + + def test_user_permission_with_lazy_reverse(self): + alfred = User.objects.create_user( + "alfred", "alfred@example.com", password="testpw" + ) + response = self.client.get("/login_required_view/") + self.assertRedirects( + response, "/login/?next=/login_required_view/", status_code=302 + ) + self.client.force_login(alfred) + response = self.client.get("/login_required_view/") + self.assertEqual(response.status_code, 200) + + def test_inserting_reverse_lazy_into_string(self): + self.assertEqual( + "Some URL: %s" % reverse_lazy("some-login-page"), "Some URL: /login/" + ) + + def test_build_absolute_uri(self): + factory = RequestFactory() + request = factory.get("/") + self.assertEqual( + request.build_absolute_uri(reverse_lazy("some-login-page")), + "http://testserver/login/", + ) + + +class ReverseLazySettingsTest(AdminScriptTestCase): + """ + reverse_lazy can be used in settings without causing a circular + import error. 
+ """ + + def setUp(self): + super().setUp() + self.write_settings( + "settings.py", + extra=( + "from django.urls import reverse_lazy\n" + "LOGIN_URL = reverse_lazy('login')" + ), + ) + + def test_lazy_in_settings(self): + out, err = self.run_manage(["check"]) + self.assertNoOutput(err) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls") +class ReverseShortcutTests(SimpleTestCase): + def test_redirect_to_object(self): + # We don't really need a model; just something with a get_absolute_url + class FakeObj: + def get_absolute_url(self): + return "/hi-there/" + + res = redirect(FakeObj()) + self.assertIsInstance(res, HttpResponseRedirect) + self.assertEqual(res.url, "/hi-there/") + + res = redirect(FakeObj(), permanent=True) + self.assertIsInstance(res, HttpResponsePermanentRedirect) + self.assertEqual(res.url, "/hi-there/") + + def test_redirect_to_view_name(self): + res = redirect("hardcoded2") + self.assertEqual(res.url, "/hardcoded/doc.pdf") + res = redirect("places", 1) + self.assertEqual(res.url, "/places/1/") + res = redirect("headlines", year="2008", month="02", day="17") + self.assertEqual(res.url, "/headlines/2008.02.17/") + with self.assertRaises(NoReverseMatch): + redirect("not-a-view") + + def test_redirect_to_url(self): + res = redirect("/foo/") + self.assertEqual(res.url, "/foo/") + res = redirect("http://example.com/") + self.assertEqual(res.url, "http://example.com/") + # Assert that we can redirect using UTF-8 strings + res = redirect("/æøå/abc/") + self.assertEqual(res.url, "/%C3%A6%C3%B8%C3%A5/abc/") + # Assert that no imports are attempted when dealing with a relative path + # (previously, the below would resolve in a UnicodeEncodeError from __import__ ) + res = redirect("/æøå.abc/") + self.assertEqual(res.url, "/%C3%A6%C3%B8%C3%A5.abc/") + res = redirect("os.path") + self.assertEqual(res.url, "os.path") + + def test_no_illegal_imports(self): + # modules that are not listed in urlpatterns should not be importable + 
redirect("urlpatterns_reverse.nonimported_module.view") + self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules) + + def test_reverse_by_path_nested(self): + # Views added to urlpatterns using include() should be reversible. + from .views import nested_view + + self.assertEqual(reverse(nested_view), "/includes/nested_path/") + + def test_redirect_view_object(self): + from .views import absolute_kwargs_view + + res = redirect(absolute_kwargs_view) + self.assertEqual(res.url, "/absolute_arg_view/") + with self.assertRaises(NoReverseMatch): + redirect(absolute_kwargs_view, wrong_argument=None) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.namespace_urls") +class NamespaceTests(SimpleTestCase): + def test_ambiguous_object(self): + """ + Names deployed via dynamic URL objects that require namespaces can't + be resolved. + """ + test_urls = [ + ("urlobject-view", [], {}), + ("urlobject-view", [37, 42], {}), + ("urlobject-view", [], {"arg1": 42, "arg2": 37}), + ] + for name, args, kwargs in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + with self.assertRaises(NoReverseMatch): + reverse(name, args=args, kwargs=kwargs) + + def test_ambiguous_urlpattern(self): + """ + Names deployed via dynamic URL objects that require namespaces can't + be resolved. 
+ """ + test_urls = [ + ("inner-nothing", [], {}), + ("inner-nothing", [37, 42], {}), + ("inner-nothing", [], {"arg1": 42, "arg2": 37}), + ] + for name, args, kwargs in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + with self.assertRaises(NoReverseMatch): + reverse(name, args=args, kwargs=kwargs) + + def test_non_existent_namespace(self): + """Nonexistent namespaces raise errors.""" + test_urls = [ + "blahblah:urlobject-view", + "test-ns1:blahblah:urlobject-view", + ] + for name in test_urls: + with self.subTest(name=name): + with self.assertRaises(NoReverseMatch): + reverse(name) + + def test_normal_name(self): + """Normal lookups work as expected.""" + test_urls = [ + ("normal-view", [], {}, "/normal/"), + ("normal-view", [37, 42], {}, "/normal/37/42/"), + ("normal-view", [], {"arg1": 42, "arg2": 37}, "/normal/42/37/"), + ("special-view", [], {}, "/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_simple_included_name(self): + """Normal lookups work on names included from other patterns.""" + test_urls = [ + ("included_namespace_urls:inc-normal-view", [], {}, "/included/normal/"), + ( + "included_namespace_urls:inc-normal-view", + [37, 42], + {}, + "/included/normal/37/42/", + ), + ( + "included_namespace_urls:inc-normal-view", + [], + {"arg1": 42, "arg2": 37}, + "/included/normal/42/37/", + ), + ("included_namespace_urls:inc-special-view", [], {}, "/included/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_namespace_object(self): + """Dynamic URL objects can be found using a namespace.""" + test_urls = [ + ("test-ns1:urlobject-view", [], {}, "/test1/inner/"), + ("test-ns1:urlobject-view", [37, 42], {}, "/test1/inner/37/42/"), + ( + 
"test-ns1:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "/test1/inner/42/37/", + ), + ("test-ns1:urlobject-special-view", [], {}, "/test1/inner/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_app_object(self): + """ + Dynamic URL objects can return a (pattern, app_name) 2-tuple, and + include() can set the namespace. + """ + test_urls = [ + ("new-ns1:urlobject-view", [], {}, "/newapp1/inner/"), + ("new-ns1:urlobject-view", [37, 42], {}, "/newapp1/inner/37/42/"), + ( + "new-ns1:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "/newapp1/inner/42/37/", + ), + ("new-ns1:urlobject-special-view", [], {}, "/newapp1/inner/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_app_object_default_namespace(self): + """ + Namespace defaults to app_name when including a (pattern, app_name) + 2-tuple. 
+ """ + test_urls = [ + ("newapp:urlobject-view", [], {}, "/new-default/inner/"), + ("newapp:urlobject-view", [37, 42], {}, "/new-default/inner/37/42/"), + ( + "newapp:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "/new-default/inner/42/37/", + ), + ("newapp:urlobject-special-view", [], {}, "/new-default/inner/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_embedded_namespace_object(self): + """Namespaces can be installed anywhere in the URL pattern tree.""" + test_urls = [ + ( + "included_namespace_urls:test-ns3:urlobject-view", + [], + {}, + "/included/test3/inner/", + ), + ( + "included_namespace_urls:test-ns3:urlobject-view", + [37, 42], + {}, + "/included/test3/inner/37/42/", + ), + ( + "included_namespace_urls:test-ns3:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "/included/test3/inner/42/37/", + ), + ( + "included_namespace_urls:test-ns3:urlobject-special-view", + [], + {}, + "/included/test3/inner/+%5C$*/", + ), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_namespace_pattern(self): + """Namespaces can be applied to include()'d urlpatterns.""" + test_urls = [ + ("inc-ns1:inc-normal-view", [], {}, "/ns-included1/normal/"), + ("inc-ns1:inc-normal-view", [37, 42], {}, "/ns-included1/normal/37/42/"), + ( + "inc-ns1:inc-normal-view", + [], + {"arg1": 42, "arg2": 37}, + "/ns-included1/normal/42/37/", + ), + ("inc-ns1:inc-special-view", [], {}, "/ns-included1/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_app_name_pattern(self): + """ + Namespaces can be applied to include()'d urlpatterns that 
set an + app_name attribute. + """ + test_urls = [ + ("app-ns1:inc-normal-view", [], {}, "/app-included1/normal/"), + ("app-ns1:inc-normal-view", [37, 42], {}, "/app-included1/normal/37/42/"), + ( + "app-ns1:inc-normal-view", + [], + {"arg1": 42, "arg2": 37}, + "/app-included1/normal/42/37/", + ), + ("app-ns1:inc-special-view", [], {}, "/app-included1/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_namespace_pattern_with_variable_prefix(self): + """ + Using include() with namespaces when there is a regex variable in front + of it. + """ + test_urls = [ + ("inc-outer:inc-normal-view", [], {"outer": 42}, "/ns-outer/42/normal/"), + ("inc-outer:inc-normal-view", [42], {}, "/ns-outer/42/normal/"), + ( + "inc-outer:inc-normal-view", + [], + {"arg1": 37, "arg2": 4, "outer": 42}, + "/ns-outer/42/normal/37/4/", + ), + ("inc-outer:inc-normal-view", [42, 37, 4], {}, "/ns-outer/42/normal/37/4/"), + ("inc-outer:inc-special-view", [], {"outer": 42}, "/ns-outer/42/+%5C$*/"), + ("inc-outer:inc-special-view", [42], {}, "/ns-outer/42/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_multiple_namespace_pattern(self): + """Namespaces can be embedded.""" + test_urls = [ + ("inc-ns1:test-ns3:urlobject-view", [], {}, "/ns-included1/test3/inner/"), + ( + "inc-ns1:test-ns3:urlobject-view", + [37, 42], + {}, + "/ns-included1/test3/inner/37/42/", + ), + ( + "inc-ns1:test-ns3:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "/ns-included1/test3/inner/42/37/", + ), + ( + "inc-ns1:test-ns3:urlobject-special-view", + [], + {}, + "/ns-included1/test3/inner/+%5C$*/", + ), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + 
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_nested_namespace_pattern(self): + """Namespaces can be nested.""" + test_urls = [ + ( + "inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view", + [], + {}, + "/ns-included1/ns-included4/ns-included1/test3/inner/", + ), + ( + "inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view", + [37, 42], + {}, + "/ns-included1/ns-included4/ns-included1/test3/inner/37/42/", + ), + ( + "inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "/ns-included1/ns-included4/ns-included1/test3/inner/42/37/", + ), + ( + "inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view", + [], + {}, + "/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/", + ), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_app_lookup_object(self): + """A default application namespace can be used for lookup.""" + test_urls = [ + ("testapp:urlobject-view", [], {}, "/default/inner/"), + ("testapp:urlobject-view", [37, 42], {}, "/default/inner/37/42/"), + ( + "testapp:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "/default/inner/42/37/", + ), + ("testapp:urlobject-special-view", [], {}, "/default/inner/+%5C$*/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_app_lookup_object_with_default(self): + """A default application namespace is sensitive to the current app.""" + test_urls = [ + ("testapp:urlobject-view", [], {}, "test-ns3", "/default/inner/"), + ( + "testapp:urlobject-view", + [37, 42], + {}, + "test-ns3", + "/default/inner/37/42/", + ), + ( + "testapp:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "test-ns3", + "/default/inner/42/37/", + ), + ( + "testapp:urlobject-special-view", + [], + {}, + 
"test-ns3", + "/default/inner/+%5C$*/", + ), + ] + for name, args, kwargs, current_app, expected in test_urls: + with self.subTest( + name=name, args=args, kwargs=kwargs, current_app=current_app + ): + self.assertEqual( + reverse(name, args=args, kwargs=kwargs, current_app=current_app), + expected, + ) + + def test_app_lookup_object_without_default(self): + """ + An application namespace without a default is sensitive to the current + app. + """ + test_urls = [ + ("nodefault:urlobject-view", [], {}, None, "/other2/inner/"), + ("nodefault:urlobject-view", [37, 42], {}, None, "/other2/inner/37/42/"), + ( + "nodefault:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + None, + "/other2/inner/42/37/", + ), + ("nodefault:urlobject-special-view", [], {}, None, "/other2/inner/+%5C$*/"), + ("nodefault:urlobject-view", [], {}, "other-ns1", "/other1/inner/"), + ( + "nodefault:urlobject-view", + [37, 42], + {}, + "other-ns1", + "/other1/inner/37/42/", + ), + ( + "nodefault:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "other-ns1", + "/other1/inner/42/37/", + ), + ( + "nodefault:urlobject-special-view", + [], + {}, + "other-ns1", + "/other1/inner/+%5C$*/", + ), + ] + for name, args, kwargs, current_app, expected in test_urls: + with self.subTest( + name=name, args=args, kwargs=kwargs, current_app=current_app + ): + self.assertEqual( + reverse(name, args=args, kwargs=kwargs, current_app=current_app), + expected, + ) + + def test_special_chars_namespace(self): + test_urls = [ + ( + "special:included_namespace_urls:inc-normal-view", + [], + {}, + "/+%5C$*/included/normal/", + ), + ( + "special:included_namespace_urls:inc-normal-view", + [37, 42], + {}, + "/+%5C$*/included/normal/37/42/", + ), + ( + "special:included_namespace_urls:inc-normal-view", + [], + {"arg1": 42, "arg2": 37}, + "/+%5C$*/included/normal/42/37/", + ), + ( + "special:included_namespace_urls:inc-special-view", + [], + {}, + "/+%5C$*/included/+%5C$*/", + ), + ] + for name, args, kwargs, expected in 
test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_namespaces_with_variables(self): + """Namespace prefixes can capture variables.""" + test_urls = [ + ("inc-ns5:inner-nothing", [], {"outer": "70"}, "/inc70/"), + ( + "inc-ns5:inner-extra", + [], + {"extra": "foobar", "outer": "78"}, + "/inc78/extra/foobar/", + ), + ("inc-ns5:inner-nothing", ["70"], {}, "/inc70/"), + ("inc-ns5:inner-extra", ["78", "foobar"], {}, "/inc78/extra/foobar/"), + ] + for name, args, kwargs, expected in test_urls: + with self.subTest(name=name, args=args, kwargs=kwargs): + self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) + + def test_nested_app_lookup(self): + """ + A nested current_app should be split in individual namespaces (#24904). + """ + test_urls = [ + ( + "inc-ns1:testapp:urlobject-view", + [], + {}, + None, + "/ns-included1/test4/inner/", + ), + ( + "inc-ns1:testapp:urlobject-view", + [37, 42], + {}, + None, + "/ns-included1/test4/inner/37/42/", + ), + ( + "inc-ns1:testapp:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + None, + "/ns-included1/test4/inner/42/37/", + ), + ( + "inc-ns1:testapp:urlobject-special-view", + [], + {}, + None, + "/ns-included1/test4/inner/+%5C$*/", + ), + ( + "inc-ns1:testapp:urlobject-view", + [], + {}, + "inc-ns1:test-ns3", + "/ns-included1/test3/inner/", + ), + ( + "inc-ns1:testapp:urlobject-view", + [37, 42], + {}, + "inc-ns1:test-ns3", + "/ns-included1/test3/inner/37/42/", + ), + ( + "inc-ns1:testapp:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "inc-ns1:test-ns3", + "/ns-included1/test3/inner/42/37/", + ), + ( + "inc-ns1:testapp:urlobject-special-view", + [], + {}, + "inc-ns1:test-ns3", + "/ns-included1/test3/inner/+%5C$*/", + ), + ] + for name, args, kwargs, current_app, expected in test_urls: + with self.subTest( + name=name, args=args, kwargs=kwargs, current_app=current_app + ): + self.assertEqual( + reverse(name, 
args=args, kwargs=kwargs, current_app=current_app), + expected, + ) + + def test_current_app_no_partial_match(self): + """current_app shouldn't be used unless it matches the whole path.""" + test_urls = [ + ( + "inc-ns1:testapp:urlobject-view", + [], + {}, + "nonexistent:test-ns3", + "/ns-included1/test4/inner/", + ), + ( + "inc-ns1:testapp:urlobject-view", + [37, 42], + {}, + "nonexistent:test-ns3", + "/ns-included1/test4/inner/37/42/", + ), + ( + "inc-ns1:testapp:urlobject-view", + [], + {"arg1": 42, "arg2": 37}, + "nonexistent:test-ns3", + "/ns-included1/test4/inner/42/37/", + ), + ( + "inc-ns1:testapp:urlobject-special-view", + [], + {}, + "nonexistent:test-ns3", + "/ns-included1/test4/inner/+%5C$*/", + ), + ] + for name, args, kwargs, current_app, expected in test_urls: + with self.subTest( + name=name, args=args, kwargs=kwargs, current_app=current_app + ): + self.assertEqual( + reverse(name, args=args, kwargs=kwargs, current_app=current_app), + expected, + ) + + +@override_settings(ROOT_URLCONF=urlconf_outer.__name__) +class RequestURLconfTests(SimpleTestCase): + def test_urlconf(self): + response = self.client.get("/test/me/") + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.content, b"outer:/test/me/,inner:/inner_urlconf/second_test/" + ) + response = self.client.get("/inner_urlconf/second_test/") + self.assertEqual(response.status_code, 200) + response = self.client.get("/second_test/") + self.assertEqual(response.status_code, 404) + + @override_settings( + MIDDLEWARE=[ + "%s.ChangeURLconfMiddleware" % middleware.__name__, + ] + ) + def test_urlconf_overridden(self): + response = self.client.get("/test/me/") + self.assertEqual(response.status_code, 404) + response = self.client.get("/inner_urlconf/second_test/") + self.assertEqual(response.status_code, 404) + response = self.client.get("/second_test/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"outer:,inner:/second_test/") + + 
@override_settings( + MIDDLEWARE=[ + "%s.NullChangeURLconfMiddleware" % middleware.__name__, + ] + ) + def test_urlconf_overridden_with_null(self): + """ + Overriding request.urlconf with None will fall back to the default + URLconf. + """ + response = self.client.get("/test/me/") + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.content, b"outer:/test/me/,inner:/inner_urlconf/second_test/" + ) + response = self.client.get("/inner_urlconf/second_test/") + self.assertEqual(response.status_code, 200) + response = self.client.get("/second_test/") + self.assertEqual(response.status_code, 404) + + @override_settings( + MIDDLEWARE=[ + "%s.ChangeURLconfMiddleware" % middleware.__name__, + "%s.ReverseInnerInResponseMiddleware" % middleware.__name__, + ] + ) + def test_reverse_inner_in_response_middleware(self): + """ + Test reversing an URL from the *overridden* URLconf from inside + a response middleware. + """ + response = self.client.get("/second_test/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b"/second_test/") + + @override_settings( + MIDDLEWARE=[ + "%s.ChangeURLconfMiddleware" % middleware.__name__, + "%s.ReverseOuterInResponseMiddleware" % middleware.__name__, + ] + ) + def test_reverse_outer_in_response_middleware(self): + """ + Test reversing an URL from the *default* URLconf from inside + a response middleware. + """ + msg = ( + "Reverse for 'outer' not found. 'outer' is not a valid view " + "function or pattern name." + ) + with self.assertRaisesMessage(NoReverseMatch, msg): + self.client.get("/second_test/") + + @override_settings( + MIDDLEWARE=[ + "%s.ChangeURLconfMiddleware" % middleware.__name__, + "%s.ReverseInnerInStreaming" % middleware.__name__, + ] + ) + def test_reverse_inner_in_streaming(self): + """ + Test reversing an URL from the *overridden* URLconf from inside + a streaming response. 
+ """ + response = self.client.get("/second_test/") + self.assertEqual(response.status_code, 200) + self.assertEqual(b"".join(response), b"/second_test/") + + @override_settings( + MIDDLEWARE=[ + "%s.ChangeURLconfMiddleware" % middleware.__name__, + "%s.ReverseOuterInStreaming" % middleware.__name__, + ] + ) + def test_reverse_outer_in_streaming(self): + """ + Test reversing an URL from the *default* URLconf from inside + a streaming response. + """ + message = "Reverse for 'outer' not found." + with self.assertRaisesMessage(NoReverseMatch, message): + self.client.get("/second_test/") + b"".join(self.client.get("/second_test/")) + + def test_urlconf_is_reset_after_request(self): + """The URLconf is reset after each request.""" + self.assertIsNone(get_urlconf()) + with override_settings( + MIDDLEWARE=["%s.ChangeURLconfMiddleware" % middleware.__name__] + ): + self.client.get(reverse("inner")) + self.assertIsNone(get_urlconf()) + + +class ErrorHandlerResolutionTests(SimpleTestCase): + """Tests for handler400, handler404 and handler500""" + + def setUp(self): + urlconf = "urlpatterns_reverse.urls_error_handlers" + urlconf_callables = "urlpatterns_reverse.urls_error_handlers_callables" + self.resolver = URLResolver(RegexPattern(r"^$"), urlconf) + self.callable_resolver = URLResolver(RegexPattern(r"^$"), urlconf_callables) + + def test_named_handlers(self): + for code in [400, 404, 500]: + with self.subTest(code=code): + self.assertEqual(self.resolver.resolve_error_handler(code), empty_view) + + def test_callable_handlers(self): + for code in [400, 404, 500]: + with self.subTest(code=code): + self.assertEqual( + self.callable_resolver.resolve_error_handler(code), empty_view + ) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls_without_handlers") +class DefaultErrorHandlerTests(SimpleTestCase): + def test_default_handler(self): + "If the urls.py doesn't specify handlers, the defaults are used" + response = self.client.get("/test/") + 
self.assertEqual(response.status_code, 404) + + msg = "I don't think I'm getting good value for this view" + with self.assertRaisesMessage(ValueError, msg): + self.client.get("/bad_view/") + + +@override_settings(ROOT_URLCONF=None) +class NoRootUrlConfTests(SimpleTestCase): + """Tests for handler404 and handler500 if ROOT_URLCONF is None""" + + def test_no_handler_exception(self): + msg = ( + "The included URLconf 'None' does not appear to have any patterns " + "in it. If you see the 'urlpatterns' variable with valid patterns " + "in the file then the issue is probably caused by a circular " + "import." + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + self.client.get("/test/me/") + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.namespace_urls") +class ResolverMatchTests(SimpleTestCase): + def test_urlpattern_resolve(self): + for ( + path_, + url_name, + app_name, + namespace, + view_name, + func, + args, + kwargs, + ) in resolve_test_data: + with self.subTest(path=path_): + # Legacy support for extracting "function, args, kwargs". + match_func, match_args, match_kwargs = resolve(path_) + self.assertEqual(match_func, func) + self.assertEqual(match_args, args) + self.assertEqual(match_kwargs, kwargs) + # ResolverMatch capabilities. 
+ match = resolve(path_) + self.assertEqual(match.__class__, ResolverMatch) + self.assertEqual(match.url_name, url_name) + self.assertEqual(match.app_name, app_name) + self.assertEqual(match.namespace, namespace) + self.assertEqual(match.view_name, view_name) + self.assertEqual(match.func, func) + self.assertEqual(match.args, args) + self.assertEqual(match.kwargs, kwargs) + # and for legacy purposes: + self.assertEqual(match[0], func) + self.assertEqual(match[1], args) + self.assertEqual(match[2], kwargs) + + def test_resolver_match_on_request(self): + response = self.client.get("/resolver_match/") + resolver_match = response.resolver_match + self.assertEqual(resolver_match.url_name, "test-resolver-match") + + def test_resolver_match_on_request_before_resolution(self): + request = HttpRequest() + self.assertIsNone(request.resolver_match) + + def test_repr(self): + self.assertEqual( + repr(resolve("/no_kwargs/42/37/")), + "ResolverMatch(func=urlpatterns_reverse.views.empty_view, " + "args=('42', '37'), kwargs={}, url_name='no-kwargs', app_names=[], " + "namespaces=[], route='^no_kwargs/([0-9]+)/([0-9]+)/$')", + ) + + def test_repr_extra_kwargs(self): + self.assertEqual( + repr(resolve("/mixed_args/1986/11/")), + "ResolverMatch(func=urlpatterns_reverse.views.empty_view, args=(), " + "kwargs={'arg2': '11', 'extra': True}, url_name='mixed-args', " + "app_names=[], namespaces=[], " + "route='^mixed_args/([0-9]+)/(?P[0-9]+)/$', " + "captured_kwargs={'arg2': '11'}, extra_kwargs={'extra': True})", + ) + + @override_settings(ROOT_URLCONF="urlpatterns_reverse.reverse_lazy_urls") + def test_classbased_repr(self): + self.assertEqual( + repr(resolve("/redirect/")), + "ResolverMatch(func=urlpatterns_reverse.views.LazyRedirectView, " + "args=(), kwargs={}, url_name=None, app_names=[], " + "namespaces=[], route='redirect/')", + ) + + @override_settings(ROOT_URLCONF="urlpatterns_reverse.urls") + def test_repr_functools_partial(self): + tests = [ + ("partial", "template.html"), + 
("partial_nested", "nested_partial.html"), + ("partial_wrapped", "template.html"), + ] + for name, template_name in tests: + with self.subTest(name=name): + func = ( + f"functools.partial({views.empty_view!r}, " + f"template_name='{template_name}')" + ) + self.assertEqual( + repr(resolve(f"/{name}/")), + f"ResolverMatch(func={func}, args=(), kwargs={{}}, " + f"url_name='{name}', app_names=[], namespaces=[], " + f"route='{name}/')", + ) + + @override_settings(ROOT_URLCONF="urlpatterns.path_urls") + def test_pickling(self): + msg = "Cannot pickle ResolverMatch." + with self.assertRaisesMessage(pickle.PicklingError, msg): + pickle.dumps(resolve("/users/")) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.erroneous_urls") +class ErroneousViewTests(SimpleTestCase): + def test_noncallable_view(self): + # View is not a callable (explicit import; arbitrary Python object) + with self.assertRaisesMessage(TypeError, "view must be a callable"): + path("uncallable-object/", views.uncallable) + + def test_invalid_regex(self): + # Regex contains an error (refs #6170) + msg = '(regex_error/$" is not a valid regular expression' + with self.assertRaisesMessage(ImproperlyConfigured, msg): + reverse(views.empty_view) + + +class ViewLoadingTests(SimpleTestCase): + def test_view_loading(self): + self.assertEqual( + get_callable("urlpatterns_reverse.views.empty_view"), empty_view + ) + self.assertEqual(get_callable(empty_view), empty_view) + + def test_view_does_not_exist(self): + msg = "View does not exist in module urlpatterns_reverse.views." 
+ with self.assertRaisesMessage(ViewDoesNotExist, msg): + get_callable("urlpatterns_reverse.views.i_should_not_exist") + + def test_attributeerror_not_hidden(self): + msg = "I am here to confuse django.urls.get_callable" + with self.assertRaisesMessage(AttributeError, msg): + get_callable("urlpatterns_reverse.views_broken.i_am_broken") + + def test_non_string_value(self): + msg = "'1' is not a callable or a dot-notation path" + with self.assertRaisesMessage(ViewDoesNotExist, msg): + get_callable(1) + + def test_string_without_dot(self): + msg = "Could not import 'test'. The path must be fully qualified." + with self.assertRaisesMessage(ImportError, msg): + get_callable("test") + + def test_module_does_not_exist(self): + with self.assertRaisesMessage(ImportError, "No module named 'foo'"): + get_callable("foo.bar") + + def test_parent_module_does_not_exist(self): + msg = "Parent module urlpatterns_reverse.foo does not exist." + with self.assertRaisesMessage(ViewDoesNotExist, msg): + get_callable("urlpatterns_reverse.foo.bar") + + def test_not_callable(self): + msg = ( + "Could not import 'urlpatterns_reverse.tests.resolve_test_data'. " + "View is not callable." + ) + with self.assertRaisesMessage(ViewDoesNotExist, msg): + get_callable("urlpatterns_reverse.tests.resolve_test_data") + + +class IncludeTests(SimpleTestCase): + url_patterns = [ + path("inner/", views.empty_view, name="urlobject-view"), + re_path( + r"^inner/(?P[0-9]+)/(?P[0-9]+)/$", + views.empty_view, + name="urlobject-view", + ), + re_path(r"^inner/\+\\\$\*/$", views.empty_view, name="urlobject-special-view"), + ] + app_urls = URLObject("inc-app") + + def test_include_urls(self): + self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None)) + + def test_include_namespace(self): + msg = ( + "Specifying a namespace in include() without providing an " + "app_name is not supported." 
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + include(self.url_patterns, "namespace") + + def test_include_4_tuple(self): + msg = "Passing a 4-tuple to include() is not supported." + with self.assertRaisesMessage(ImproperlyConfigured, msg): + include((self.url_patterns, "app_name", "namespace", "blah")) + + def test_include_3_tuple(self): + msg = "Passing a 3-tuple to include() is not supported." + with self.assertRaisesMessage(ImproperlyConfigured, msg): + include((self.url_patterns, "app_name", "namespace")) + + def test_include_3_tuple_namespace(self): + msg = ( + "Cannot override the namespace for a dynamic module that provides a " + "namespace." + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + include((self.url_patterns, "app_name", "namespace"), "namespace") + + def test_include_2_tuple(self): + self.assertEqual( + include((self.url_patterns, "app_name")), + (self.url_patterns, "app_name", "app_name"), + ) + + def test_include_2_tuple_namespace(self): + self.assertEqual( + include((self.url_patterns, "app_name"), namespace="namespace"), + (self.url_patterns, "app_name", "namespace"), + ) + + def test_include_app_name(self): + self.assertEqual(include(self.app_urls), (self.app_urls, "inc-app", "inc-app")) + + def test_include_app_name_namespace(self): + self.assertEqual( + include(self.app_urls, "namespace"), (self.app_urls, "inc-app", "namespace") + ) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls") +class LookaheadTests(SimpleTestCase): + def test_valid_resolve(self): + test_urls = [ + "/lookahead-/a-city/", + "/lookbehind-/a-city/", + "/lookahead+/a-city/", + "/lookbehind+/a-city/", + ] + for test_url in test_urls: + with self.subTest(url=test_url): + self.assertEqual(resolve(test_url).kwargs, {"city": "a-city"}) + + def test_invalid_resolve(self): + test_urls = [ + "/lookahead-/not-a-city/", + "/lookbehind-/not-a-city/", + "/lookahead+/other-city/", + "/lookbehind+/other-city/", + ] + for test_url in 
test_urls: + with self.subTest(url=test_url): + with self.assertRaises(Resolver404): + resolve(test_url) + + def test_valid_reverse(self): + test_urls = [ + ("lookahead-positive", {"city": "a-city"}, "/lookahead+/a-city/"), + ("lookahead-negative", {"city": "a-city"}, "/lookahead-/a-city/"), + ("lookbehind-positive", {"city": "a-city"}, "/lookbehind+/a-city/"), + ("lookbehind-negative", {"city": "a-city"}, "/lookbehind-/a-city/"), + ] + for name, kwargs, expected in test_urls: + with self.subTest(name=name, kwargs=kwargs): + self.assertEqual(reverse(name, kwargs=kwargs), expected) + + def test_invalid_reverse(self): + test_urls = [ + ("lookahead-positive", {"city": "other-city"}), + ("lookahead-negative", {"city": "not-a-city"}), + ("lookbehind-positive", {"city": "other-city"}), + ("lookbehind-negative", {"city": "not-a-city"}), + ] + for name, kwargs in test_urls: + with self.subTest(name=name, kwargs=kwargs): + with self.assertRaises(NoReverseMatch): + reverse(name, kwargs=kwargs) + + +@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls") +class ReverseResolvedTests(SimpleTestCase): + def test_rereverse(self): + match = resolve("/resolved/12/") + self.assertEqual( + reverse(match.url_name, args=match.args, kwargs=match.kwargs), + "/resolved/12/", + ) + match = resolve("/resolved-overridden/12/url/") + self.assertEqual( + reverse(match.url_name, args=match.args, kwargs=match.captured_kwargs), + "/resolved-overridden/12/url/", + ) diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/__init__.py b/testbed/django__django/tests/urlpatterns_reverse/translations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/locale/__init__.py b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/LC_MESSAGES/django.mo b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..fc53234553fdc7c38b72e9d143c0002ba23b9a8f Binary files /dev/null and b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/LC_MESSAGES/django.po b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..a4dde5ff1ba51a8e44d0797be83d1e6d3a409ecf --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/LC_MESSAGES/django.po @@ -0,0 +1,20 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: django tests\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-02-14 17:33+0100\n" +"PO-Revision-Date: 2011-01-21 21:37-0300\n" +"Last-Translator: Carl Meyer\n" +"Language-Team: de \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1)\n" + +msgid "^foo/$" +msgstr "^foo-de/$" diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/__init__.py b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/de/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/LC_MESSAGES/django.mo b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..e755e5baeade0069f91f3aafead4a676033a9475 Binary files /dev/null and b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/LC_MESSAGES/django.po b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..de1b96611ce053774fcd3c5d5e28650153624e25 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/LC_MESSAGES/django.po @@ -0,0 +1,20 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: django tests\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-02-14 17:33+0100\n" +"PO-Revision-Date: 2011-01-21 21:37-0300\n" +"Last-Translator: Carl Meyer\n" +"Language-Team: fr \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1)\n" + +msgid "^foo/$" +msgstr "^foo-fr/$" diff --git a/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/__init__.py b/testbed/django__django/tests/urlpatterns_reverse/translations/locale/fr/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/urlpatterns_reverse/urlconf_inner.py b/testbed/django__django/tests/urlpatterns_reverse/urlconf_inner.py new file mode 100644 index 0000000000000000000000000000000000000000..5cd104c372042b1f8a311a85198cedc8bdf1abc6 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/urlconf_inner.py @@ -0,0 +1,16 @@ +from django.http import HttpResponse +from django.template import Context, Template +from django.urls import path + + +def inner_view(request): + content = Template( + '{% url "outer" as outer_url %}outer:{{ outer_url }},' + '{% url "inner" as inner_url %}inner:{{ inner_url }}' + ).render(Context()) + return HttpResponse(content) + + +urlpatterns = [ + path("second_test/", inner_view, name="inner"), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/urlconf_outer.py b/testbed/django__django/tests/urlpatterns_reverse/urlconf_outer.py new file mode 100644 index 0000000000000000000000000000000000000000..a15932bccf29178633509a10904cee5245ebf718 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/urlconf_outer.py @@ -0,0 +1,8 @@ +from django.urls import include, path + +from . 
import urlconf_inner + +urlpatterns = [ + path("test/me/", urlconf_inner.inner_view, name="outer"), + path("inner_urlconf/", include(urlconf_inner.__name__)), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/urls.py b/testbed/django__django/tests/urlpatterns_reverse/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..c7453314835a09479393cb0d60691d0372e70db7 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/urls.py @@ -0,0 +1,139 @@ +from django.urls import include, path, re_path + +from .views import ( + absolute_kwargs_view, + defaults_view, + empty_view, + empty_view_nested_partial, + empty_view_partial, + empty_view_wrapped, + nested_view, +) + +other_patterns = [ + path("non_path_include/", empty_view, name="non_path_include"), + path("nested_path/", nested_view), +] + +urlpatterns = [ + re_path(r"^places/([0-9]+)/$", empty_view, name="places"), + re_path(r"^places?/$", empty_view, name="places?"), + re_path(r"^places+/$", empty_view, name="places+"), + re_path(r"^places*/$", empty_view, name="places*"), + re_path(r"^(?:places/)?$", empty_view, name="places2?"), + re_path(r"^(?:places/)+$", empty_view, name="places2+"), + re_path(r"^(?:places/)*$", empty_view, name="places2*"), + re_path(r"^places/([0-9]+|[a-z_]+)/", empty_view, name="places3"), + re_path(r"^places/(?P[0-9]+)/$", empty_view, name="places4"), + re_path(r"^people/(?P\w+)/$", empty_view, name="people"), + re_path(r"^people/(?:name/)$", empty_view, name="people2"), + re_path(r"^people/(?:name/(\w+)/)?$", empty_view, name="people2a"), + re_path(r"^people/(?P\w+)-(?P=name)/$", empty_view, name="people_backref"), + re_path(r"^optional/(?P.*)/(?:.+/)?", empty_view, name="optional"), + re_path( + r"^optional/(?P\d+)/(?:(?P\d+)/)?", + absolute_kwargs_view, + name="named_optional", + ), + re_path( + r"^optional/(?P\d+)/(?:(?P\d+)/)?$", + absolute_kwargs_view, + name="named_optional_terminated", + ), + re_path( + r"^nested/noncapture/(?:(?P

    \w+))$", empty_view, name="nested-noncapture" + ), + re_path(r"^nested/capture/((\w+)/)?$", empty_view, name="nested-capture"), + re_path( + r"^nested/capture/mixed/((?P

    \w+))$", empty_view, name="nested-mixedcapture" + ), + re_path( + r"^nested/capture/named/(?P(?P\w+)/)?$", + empty_view, + name="nested-namedcapture", + ), + re_path(r"^hardcoded/$", empty_view, name="hardcoded"), + re_path(r"^hardcoded/doc\.pdf$", empty_view, name="hardcoded2"), + re_path(r"^people/(?P\w\w)/(?P\w+)/$", empty_view, name="people3"), + re_path(r"^people/(?P\w\w)/(?P[0-9])/$", empty_view, name="people4"), + re_path(r"^people/((?P\w\w)/test)?/(\w+)/$", empty_view, name="people6"), + re_path(r"^character_set/[abcdef0-9]/$", empty_view, name="range"), + re_path(r"^character_set/[\w]/$", empty_view, name="range2"), + re_path(r"^price/\$([0-9]+)/$", empty_view, name="price"), + re_path(r"^price/[$]([0-9]+)/$", empty_view, name="price2"), + re_path(r"^price/[\$]([0-9]+)/$", empty_view, name="price3"), + re_path( + r"^product/(?P\w+)\+\(\$(?P[0-9]+(\.[0-9]+)?)\)/$", + empty_view, + name="product", + ), + re_path( + r"^headlines/(?P[0-9]+)\.(?P[0-9]+)\.(?P[0-9]+)/$", + empty_view, + name="headlines", + ), + re_path( + r"^windows_path/(?P[A-Z]):\\(?P.+)/$", + empty_view, + name="windows", + ), + re_path(r"^special_chars/(?P.+)/$", empty_view, name="special"), + re_path(r"^resolved/(?P\d+)/$", empty_view, {"extra": True}, name="resolved"), + re_path( + r"^resolved-overridden/(?P\d+)/(?P\w+)/$", + empty_view, + {"extra": True, "overridden": "default"}, + name="resolved-overridden", + ), + re_path(r"^(?P.+)/[0-9]+/$", empty_view, name="mixed"), + re_path(r"^repeats/a{1,2}/$", empty_view, name="repeats"), + re_path(r"^repeats/a{2,4}/$", empty_view, name="repeats2"), + re_path(r"^repeats/a{2}/$", empty_view, name="repeats3"), + re_path(r"^test/1/?", empty_view, name="test"), + re_path(r"^outer/(?P[0-9]+)/", include("urlpatterns_reverse.included_urls")), + re_path( + r"^outer-no-kwargs/([0-9]+)/", + include("urlpatterns_reverse.included_no_kwargs_urls"), + ), + re_path("", include("urlpatterns_reverse.extra_urls")), + re_path( + 
r"^lookahead-/(?!not-a-city)(?P[^/]+)/$", + empty_view, + name="lookahead-negative", + ), + re_path( + r"^lookahead\+/(?=a-city)(?P[^/]+)/$", + empty_view, + name="lookahead-positive", + ), + re_path( + r"^lookbehind-/(?P[^/]+)(?[^/]+)(?<=a-city)/$", + empty_view, + name="lookbehind-positive", + ), + # Partials should be fine. + path("partial/", empty_view_partial, name="partial"), + path("partial_nested/", empty_view_nested_partial, name="partial_nested"), + path("partial_wrapped/", empty_view_wrapped, name="partial_wrapped"), + # This is non-reversible, but we shouldn't blow up when parsing it. + re_path(r"^(?:foo|bar)(\w+)/$", empty_view, name="disjunction"), + path("absolute_arg_view/", absolute_kwargs_view), + # Tests for #13154. Mixed syntax to test both ways of defining URLs. + re_path( + r"^defaults_view1/(?P[0-9]+)/$", + defaults_view, + {"arg2": 1}, + name="defaults", + ), + re_path( + r"^defaults_view2/(?P[0-9]+)/$", defaults_view, {"arg2": 2}, "defaults" + ), + path("includes/", include(other_patterns)), + # Security tests + re_path("(.+)/security/$", empty_view, name="security"), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/urls_error_handlers.py b/testbed/django__django/tests/urlpatterns_reverse/urls_error_handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..7261a97e07f7d5e55cec17dd283d6efedfa722e2 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/urls_error_handlers.py @@ -0,0 +1,7 @@ +# Used by the ErrorHandlerResolutionTests test case. 
+ +urlpatterns = [] + +handler400 = "urlpatterns_reverse.views.empty_view" +handler404 = "urlpatterns_reverse.views.empty_view" +handler500 = "urlpatterns_reverse.views.empty_view" diff --git a/testbed/django__django/tests/urlpatterns_reverse/urls_error_handlers_callables.py b/testbed/django__django/tests/urlpatterns_reverse/urls_error_handlers_callables.py new file mode 100644 index 0000000000000000000000000000000000000000..4a8d35116e5e24c9738ecd2c90864470a103fff5 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/urls_error_handlers_callables.py @@ -0,0 +1,9 @@ +# Used by the ErrorHandlerResolutionTests test case. + +from .views import empty_view + +urlpatterns = [] + +handler400 = empty_view +handler404 = empty_view +handler500 = empty_view diff --git a/testbed/django__django/tests/urlpatterns_reverse/urls_without_handlers.py b/testbed/django__django/tests/urlpatterns_reverse/urls_without_handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..8d5ee0d34727396478dd76d073de588024b0968e --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/urls_without_handlers.py @@ -0,0 +1,9 @@ +# A URLconf that doesn't define any handlerXXX. +from django.urls import path + +from .views import bad_view, empty_view + +urlpatterns = [ + path("test_view/", empty_view, name="test_view"), + path("bad_view/", bad_view, name="bad_view"), +] diff --git a/testbed/django__django/tests/urlpatterns_reverse/utils.py b/testbed/django__django/tests/urlpatterns_reverse/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..82218f1de4a1be69a51854e0f51243d0c517052f --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/utils.py @@ -0,0 +1,27 @@ +from django.urls import path, re_path + +from . 
import views + + +class URLObject: + urlpatterns = [ + path("inner/", views.empty_view, name="urlobject-view"), + re_path( + r"^inner/(?P[0-9]+)/(?P[0-9]+)/$", + views.empty_view, + name="urlobject-view", + ), + re_path(r"^inner/\+\\\$\*/$", views.empty_view, name="urlobject-special-view"), + ] + + def __init__(self, app_name, namespace=None): + self.app_name = app_name + self.namespace = namespace + + @property + def urls(self): + return (self.urlpatterns, self.app_name), self.namespace + + @property + def app_urls(self): + return self.urlpatterns, self.app_name diff --git a/testbed/django__django/tests/urlpatterns_reverse/views.py b/testbed/django__django/tests/urlpatterns_reverse/views.py new file mode 100644 index 0000000000000000000000000000000000000000..17c7fe1c3d32e90094ba493bcdaa5ec122dee95a --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/views.py @@ -0,0 +1,68 @@ +from functools import partial, update_wrapper + +from django.contrib.auth.decorators import user_passes_test +from django.http import HttpResponse +from django.urls import reverse_lazy +from django.views.generic import RedirectView + + +def empty_view(request, *args, **kwargs): + return HttpResponse() + + +def absolute_kwargs_view(request, arg1=1, arg2=2): + return HttpResponse() + + +def defaults_view(request, arg1, arg2): + pass + + +def nested_view(request): + pass + + +def erroneous_view(request): + import non_existent # NOQA + + +def pass_resolver_match_view(request, *args, **kwargs): + response = HttpResponse() + response.resolver_match = request.resolver_match + return response + + +uncallable = None # neither a callable nor a string + + +class ViewClass: + def __call__(self, request, *args, **kwargs): + return HttpResponse() + + +view_class_instance = ViewClass() + + +class LazyRedirectView(RedirectView): + url = reverse_lazy("named-lazy-url-redirected-to") + + +@user_passes_test( + lambda u: u.is_authenticated, login_url=reverse_lazy("some-login-page") +) +def 
login_required_view(request): + return HttpResponse("Hello you") + + +def bad_view(request, *args, **kwargs): + raise ValueError("I don't think I'm getting good value for this view") + + +empty_view_partial = partial(empty_view, template_name="template.html") +empty_view_nested_partial = partial( + empty_view_partial, template_name="nested_partial.html" +) +empty_view_wrapped = update_wrapper( + partial(empty_view, template_name="template.html"), + empty_view, +) diff --git a/testbed/django__django/tests/urlpatterns_reverse/views_broken.py b/testbed/django__django/tests/urlpatterns_reverse/views_broken.py new file mode 100644 index 0000000000000000000000000000000000000000..6975941bdc71fd05d0442c5200aa224330da5825 --- /dev/null +++ b/testbed/django__django/tests/urlpatterns_reverse/views_broken.py @@ -0,0 +1,2 @@ +# I just raise an AttributeError to confuse the view loading mechanism +raise AttributeError("I am here to confuse django.urls.get_callable") diff --git a/testbed/django__django/tests/urls.py b/testbed/django__django/tests/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..7d3a3a790aeebf56410d3975b7c367ef077e1be7 --- /dev/null +++ b/testbed/django__django/tests/urls.py @@ -0,0 +1,7 @@ +"""This URLconf exists because Django expects ROOT_URLCONF to exist. URLs +should be added within the test folders, and use TestCase.urls to set them. +This helps the tests remain isolated. 
+""" + + +urlpatterns = [] diff --git a/testbed/django__django/tests/user_commands/__init__.py b/testbed/django__django/tests/user_commands/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/user_commands/eggs/basic.egg b/testbed/django__django/tests/user_commands/eggs/basic.egg new file mode 100644 index 0000000000000000000000000000000000000000..cb25c6d8cf03c65d0fb274e5e26ad0db9b9b305f Binary files /dev/null and b/testbed/django__django/tests/user_commands/eggs/basic.egg differ diff --git a/testbed/django__django/tests/user_commands/management/__init__.py b/testbed/django__django/tests/user_commands/management/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/user_commands/management/commands/__init__.py b/testbed/django__django/tests/user_commands/management/commands/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/user_commands/management/commands/common_args.py b/testbed/django__django/tests/user_commands/management/commands/common_args.py new file mode 100644 index 0000000000000000000000000000000000000000..ffc895b9fafda350f7a912b72b5474855c19ad28 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/common_args.py @@ -0,0 +1,16 @@ +from argparse import ArgumentError + +from django.core.management.base import BaseCommand, CommandError + + +class Command(BaseCommand): + def add_arguments(self, parser): + try: + parser.add_argument("--version", action="version", version="A.B.C") + except ArgumentError: + pass + else: + raise CommandError("--version argument does no yet exist") + + def handle(self, *args, **options): + return "Detected that --version already exists" diff --git 
a/testbed/django__django/tests/user_commands/management/commands/dance.py b/testbed/django__django/tests/user_commands/management/commands/dance.py new file mode 100644 index 0000000000000000000000000000000000000000..2bb8c1130915f36e296035da00336d0325b5ed74 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/dance.py @@ -0,0 +1,25 @@ +from django.core.management.base import BaseCommand, CommandError + + +class Command(BaseCommand): + help = "Dance around like a madman." + args = "" + requires_system_checks = "__all__" + + def add_arguments(self, parser): + parser.add_argument("integer", nargs="?", type=int, default=0) + parser.add_argument("-s", "--style", default="Rock'n'Roll") + parser.add_argument("-x", "--example") + parser.add_argument("--opt-3", action="store_true", dest="option3") + + def handle(self, *args, **options): + example = options["example"] + if example == "raise": + raise CommandError(returncode=3) + if options["verbosity"] > 0: + self.stdout.write("I don't feel like dancing %s." % options["style"]) + self.stdout.write(",".join(options)) + if options["integer"] > 0: + self.stdout.write( + "You passed %d as a positional argument." % options["integer"] + ) diff --git a/testbed/django__django/tests/user_commands/management/commands/hal.py b/testbed/django__django/tests/user_commands/management/commands/hal.py new file mode 100644 index 0000000000000000000000000000000000000000..6b9588b053c67f1d8b91540909c591acc377e55d --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/hal.py @@ -0,0 +1,32 @@ +from django.core.management.base import BaseCommand, CommandError + + +class Command(BaseCommand): + help = "Useless command." 
+ + def add_arguments(self, parser): + parser.add_argument( + "args", + metavar="app_label", + nargs="*", + help="Specify the app label(s) to works on.", + ) + parser.add_argument("--empty", action="store_true", help="Do nothing.") + + def handle(self, *app_labels, **options): + app_labels = set(app_labels) + + if options["empty"]: + self.stdout.write() + self.stdout.write("Dave, I can't do that.") + return + + if not app_labels: + raise CommandError("I'm sorry Dave, I'm afraid I can't do that.") + + # raise an error if some --parameter is flowing from options to args + for app_label in app_labels: + if app_label.startswith("--"): + raise CommandError("Sorry, Dave, I can't let you do that.") + + self.stdout.write("Dave, my mind is going. I can feel it. I can feel it.") diff --git a/testbed/django__django/tests/user_commands/management/commands/mutually_exclusive_required.py b/testbed/django__django/tests/user_commands/management/commands/mutually_exclusive_required.py new file mode 100644 index 0000000000000000000000000000000000000000..421b3cbbdd9125d38e71f631d220d20d1bd2e0c4 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/mutually_exclusive_required.py @@ -0,0 +1,19 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("--foo-id", type=int, nargs="?", default=None) + group.add_argument("--foo-name", type=str, nargs="?", default=None) + group.add_argument("--foo-list", type=int, nargs="+") + group.add_argument("--append_const", action="append_const", const=42) + group.add_argument("--const", action="store_const", const=31) + group.add_argument("--count", action="count") + group.add_argument("--flag_false", action="store_false") + group.add_argument("--flag_true", action="store_true") + + def handle(self, *args, **options): + for option, value in options.items(): + if value is not 
None: + self.stdout.write("%s=%s" % (option, value)) diff --git a/testbed/django__django/tests/user_commands/management/commands/mutually_exclusive_required_with_same_dest.py b/testbed/django__django/tests/user_commands/management/commands/mutually_exclusive_required_with_same_dest.py new file mode 100644 index 0000000000000000000000000000000000000000..f6c9fc63a1ea2ec05d9feccc764c988a09075b4a --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/mutually_exclusive_required_with_same_dest.py @@ -0,0 +1,13 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("--for", dest="until", action="store") + group.add_argument("--until", action="store") + + def handle(self, *args, **options): + for option, value in options.items(): + if value is not None: + self.stdout.write("%s=%s" % (option, value)) diff --git a/testbed/django__django/tests/user_commands/management/commands/no_system_checks.py b/testbed/django__django/tests/user_commands/management/commands/no_system_checks.py new file mode 100644 index 0000000000000000000000000000000000000000..40c6051c53d2c7af0c0728ed983989a19611ec2c --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/no_system_checks.py @@ -0,0 +1,8 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + requires_system_checks = [] + + def handle(self, *args, **options): + pass diff --git a/testbed/django__django/tests/user_commands/management/commands/no_translations.py b/testbed/django__django/tests/user_commands/management/commands/no_translations.py new file mode 100644 index 0000000000000000000000000000000000000000..fd1b6255f77667be4f9c263fbeb8735a9a3f7d94 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/no_translations.py @@ -0,0 +1,8 @@ +from django.core.management.base 
import BaseCommand, no_translations +from django.utils import translation + + +class Command(BaseCommand): + @no_translations + def handle(self, *args, **options): + return translation.get_language() diff --git a/testbed/django__django/tests/user_commands/management/commands/outputwrapper.py b/testbed/django__django/tests/user_commands/management/commands/outputwrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..0bff3a49fbc0e7be9c02208610ee2c4b0cfe87a6 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/outputwrapper.py @@ -0,0 +1,8 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def handle(self, **options): + self.stdout.write("Working...") + self.stdout.flush() + self.stdout.write("OK") diff --git a/testbed/django__django/tests/user_commands/management/commands/required_constant_option.py b/testbed/django__django/tests/user_commands/management/commands/required_constant_option.py new file mode 100644 index 0000000000000000000000000000000000000000..5395aac23696d9565804b6c2683247b83725dbb4 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/required_constant_option.py @@ -0,0 +1,20 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument( + "--append_const", + action="append_const", + const=42, + required=True, + ) + parser.add_argument("--const", action="store_const", const=31, required=True) + parser.add_argument("--count", action="count", required=True) + parser.add_argument("--flag_false", action="store_false", required=True) + parser.add_argument("--flag_true", action="store_true", required=True) + + def handle(self, *args, **options): + for option, value in options.items(): + if value is not None: + self.stdout.write("%s=%s" % (option, value)) diff --git a/testbed/django__django/tests/user_commands/management/commands/required_list_option.py 
b/testbed/django__django/tests/user_commands/management/commands/required_list_option.py new file mode 100644 index 0000000000000000000000000000000000000000..bbff9b53f644504f9ce1859964f1613c41a55da7 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/required_list_option.py @@ -0,0 +1,10 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument("--foo-list", nargs="+", type=int, required=True) + + def handle(self, *args, **options): + for option, value in options.items(): + self.stdout.write("%s=%s" % (option, value)) diff --git a/testbed/django__django/tests/user_commands/management/commands/required_option.py b/testbed/django__django/tests/user_commands/management/commands/required_option.py new file mode 100644 index 0000000000000000000000000000000000000000..da847021894b0ac264aeae78091435516b4f31ff --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/required_option.py @@ -0,0 +1,10 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument("-n", "--need-me", required=True) + parser.add_argument("-t", "--need-me-too", required=True, dest="needme2") + + def handle(self, *args, **options): + self.stdout.write(",".join(options)) diff --git a/testbed/django__django/tests/user_commands/management/commands/reverse_url.py b/testbed/django__django/tests/user_commands/management/commands/reverse_url.py new file mode 100644 index 0000000000000000000000000000000000000000..b1fb5ad8ec42c3776dc55f0cb4acb9c792ea16c6 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/reverse_url.py @@ -0,0 +1,11 @@ +from django.core.management.base import BaseCommand +from django.urls import reverse + + +class Command(BaseCommand): + """ + This command returns a URL from a reverse() call. 
+ """ + + def handle(self, *args, **options): + return reverse("some_url") diff --git a/testbed/django__django/tests/user_commands/management/commands/set_option.py b/testbed/django__django/tests/user_commands/management/commands/set_option.py new file mode 100644 index 0000000000000000000000000000000000000000..5f8c01e541e9d141cd63afe3d67dfd16a0fcb1dc --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/set_option.py @@ -0,0 +1,9 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument("--set") + + def handle(self, **options): + self.stdout.write("Set %s" % options["set"]) diff --git a/testbed/django__django/tests/user_commands/management/commands/specific_system_checks.py b/testbed/django__django/tests/user_commands/management/commands/specific_system_checks.py new file mode 100644 index 0000000000000000000000000000000000000000..5551b2ab36e1b39cc986f0cb0632f0e1f077c419 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/specific_system_checks.py @@ -0,0 +1,9 @@ +from django.core.checks import Tags +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + requires_system_checks = [Tags.staticfiles, Tags.models] + + def handle(self, *args, **options): + pass diff --git a/testbed/django__django/tests/user_commands/management/commands/subparser.py b/testbed/django__django/tests/user_commands/management/commands/subparser.py new file mode 100644 index 0000000000000000000000000000000000000000..908a9f297673220f673432c400d711661cbf0952 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/subparser.py @@ -0,0 +1,11 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + subparsers = parser.add_subparsers() + parser_foo = subparsers.add_parser("foo") + parser_foo.add_argument("bar", type=int) + + 
def handle(self, *args, **options): + self.stdout.write(",".join(options)) diff --git a/testbed/django__django/tests/user_commands/management/commands/subparser_dest.py b/testbed/django__django/tests/user_commands/management/commands/subparser_dest.py new file mode 100644 index 0000000000000000000000000000000000000000..cc2ebb527239e98770b21d126fb195a63ded9de5 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/subparser_dest.py @@ -0,0 +1,11 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + subparsers = parser.add_subparsers(dest="subcommand", required=True) + parser_foo = subparsers.add_parser("foo") + parser_foo.add_argument("--bar") + + def handle(self, *args, **options): + self.stdout.write(",".join(options)) diff --git a/testbed/django__django/tests/user_commands/management/commands/subparser_required.py b/testbed/django__django/tests/user_commands/management/commands/subparser_required.py new file mode 100644 index 0000000000000000000000000000000000000000..e9bff797523929227bbe14770f0ff8e6cd66f1ec --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/subparser_required.py @@ -0,0 +1,13 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + subparsers_1 = parser.add_subparsers(dest="subcommand_1") + parser_foo_1 = subparsers_1.add_parser("foo_1") + subparsers_2 = parser_foo_1.add_subparsers(dest="subcommand_2") + parser_foo_2 = subparsers_2.add_parser("foo_2") + parser_foo_2.add_argument("--bar", required=True) + + def handle(self, *args, **options): + self.stdout.write(",".join(options)) diff --git a/testbed/django__django/tests/user_commands/management/commands/subparser_vanilla.py b/testbed/django__django/tests/user_commands/management/commands/subparser_vanilla.py new file mode 100644 index 
0000000000000000000000000000000000000000..1605f21cadcce50963910bc816aab142197708c9 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/subparser_vanilla.py @@ -0,0 +1,13 @@ +import argparse + +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser): + subparsers = parser.add_subparsers(parser_class=argparse.ArgumentParser) + parser_foo = subparsers.add_parser("foo") + parser_foo.add_argument("bar", type=int) + + def handle(self, *args, **options): + pass diff --git a/testbed/django__django/tests/user_commands/management/commands/transaction.py b/testbed/django__django/tests/user_commands/management/commands/transaction.py new file mode 100644 index 0000000000000000000000000000000000000000..f8e28ff7ea3ec31dd0fc178108249c6f7e594d67 --- /dev/null +++ b/testbed/django__django/tests/user_commands/management/commands/transaction.py @@ -0,0 +1,10 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + help = "Say hello." + args = "" + output_transaction = True + + def handle(self, *args, **options): + return "Hello!" diff --git a/testbed/django__django/tests/user_commands/models.py b/testbed/django__django/tests/user_commands/models.py new file mode 100644 index 0000000000000000000000000000000000000000..51fd45333cb95f1bfa857b9a307fcb9902c3f1a2 --- /dev/null +++ b/testbed/django__django/tests/user_commands/models.py @@ -0,0 +1,14 @@ +""" +User-registered management commands + +The ``manage.py`` utility provides a number of useful commands for managing a +Django project. If you want to add a utility command of your own, you can. + +The user-defined command ``dance`` is defined in the management/commands +subdirectory of this test application. It is a simple command that responds +with a printed message when invoked. + +For more details on how to define your own ``manage.py`` commands, look at the +``django.core.management.commands`` directory. 
This directory contains the +definitions for the base Django ``manage.py`` commands. +""" diff --git a/testbed/django__django/tests/user_commands/tests.py b/testbed/django__django/tests/user_commands/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..408108b5525ffe6bc86f6cb326817835b198bf21 --- /dev/null +++ b/testbed/django__django/tests/user_commands/tests.py @@ -0,0 +1,536 @@ +import os +from argparse import ArgumentDefaultsHelpFormatter +from io import StringIO +from unittest import mock + +from admin_scripts.tests import AdminScriptTestCase + +from django.apps import apps +from django.core import management +from django.core.checks import Tags +from django.core.management import BaseCommand, CommandError, find_commands +from django.core.management.utils import ( + find_command, + get_random_secret_key, + is_ignored_path, + normalize_path_patterns, + popen_wrapper, +) +from django.db import connection +from django.test import SimpleTestCase, override_settings +from django.test.utils import captured_stderr, extend_sys_path +from django.utils import translation + +from .management.commands import dance + + +# A minimal set of apps to avoid system checks running on all apps. 
+@override_settings( + INSTALLED_APPS=[ + "django.contrib.auth", + "django.contrib.contenttypes", + "user_commands", + ], +) +class CommandTests(SimpleTestCase): + def test_command(self): + out = StringIO() + management.call_command("dance", stdout=out) + self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue()) + + def test_command_style(self): + out = StringIO() + management.call_command("dance", style="Jive", stdout=out) + self.assertIn("I don't feel like dancing Jive.\n", out.getvalue()) + # Passing options as arguments also works (thanks argparse) + management.call_command("dance", "--style", "Jive", stdout=out) + self.assertIn("I don't feel like dancing Jive.\n", out.getvalue()) + + def test_language_preserved(self): + with translation.override("fr"): + management.call_command("dance", verbosity=0) + self.assertEqual(translation.get_language(), "fr") + + def test_explode(self): + """An unknown command raises CommandError""" + with self.assertRaisesMessage(CommandError, "Unknown command: 'explode'"): + management.call_command(("explode",)) + + def test_system_exit(self): + """Exception raised in a command should raise CommandError with + call_command, but SystemExit when run from command line + """ + with self.assertRaises(CommandError) as cm: + management.call_command("dance", example="raise") + self.assertEqual(cm.exception.returncode, 3) + dance.Command.requires_system_checks = [] + try: + with captured_stderr() as stderr, self.assertRaises(SystemExit) as cm: + management.ManagementUtility( + ["manage.py", "dance", "--example=raise"] + ).execute() + self.assertEqual(cm.exception.code, 3) + finally: + dance.Command.requires_system_checks = "__all__" + self.assertIn("CommandError", stderr.getvalue()) + + def test_no_translations_deactivate_translations(self): + """ + When the Command handle method is decorated with @no_translations, + translations are deactivated inside the command. 
+ """ + current_locale = translation.get_language() + with translation.override("pl"): + result = management.call_command("no_translations") + self.assertIsNone(result) + self.assertEqual(translation.get_language(), current_locale) + + def test_find_command_without_PATH(self): + """ + find_command should still work when the PATH environment variable + doesn't exist (#22256). + """ + current_path = os.environ.pop("PATH", None) + + try: + self.assertIsNone(find_command("_missing_")) + finally: + if current_path is not None: + os.environ["PATH"] = current_path + + def test_discover_commands_in_eggs(self): + """ + Management commands can also be loaded from Python eggs. + """ + egg_dir = "%s/eggs" % os.path.dirname(__file__) + egg_name = "%s/basic.egg" % egg_dir + with extend_sys_path(egg_name): + with self.settings(INSTALLED_APPS=["commandegg"]): + cmds = find_commands( + os.path.join(apps.get_app_config("commandegg").path, "management") + ) + self.assertEqual(cmds, ["eggcommand"]) + + def test_call_command_option_parsing(self): + """ + When passing the long option name to call_command, the available option + key is the option dest name (#22985). + """ + out = StringIO() + management.call_command("dance", stdout=out, opt_3=True) + self.assertIn("option3", out.getvalue()) + self.assertNotIn("opt_3", out.getvalue()) + self.assertNotIn("opt-3", out.getvalue()) + + def test_call_command_option_parsing_non_string_arg(self): + """ + It should be possible to pass non-string arguments to call_command. 
+ """ + out = StringIO() + management.call_command("dance", 1, verbosity=0, stdout=out) + self.assertIn("You passed 1 as a positional argument.", out.getvalue()) + + def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self): + out = StringIO() + management.call_command("hal", "--empty", stdout=out) + self.assertEqual(out.getvalue(), "\nDave, I can't do that.\n") + + def test_calling_command_with_app_labels_and_parameters_should_be_ok(self): + out = StringIO() + management.call_command("hal", "myapp", "--verbosity", "3", stdout=out) + self.assertIn( + "Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue() + ) + + def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok( + self, + ): + out = StringIO() + management.call_command("hal", "--verbosity", "3", "myapp", stdout=out) + self.assertIn( + "Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue() + ) + + def test_calling_a_command_with_no_app_labels_and_parameters_raise_command_error( + self, + ): + with self.assertRaises(CommandError): + management.call_command("hal") + + def test_output_transaction(self): + output = management.call_command( + "transaction", stdout=StringIO(), no_color=True + ) + self.assertTrue( + output.strip().startswith(connection.ops.start_transaction_sql()) + ) + self.assertTrue(output.strip().endswith(connection.ops.end_transaction_sql())) + + def test_call_command_no_checks(self): + """ + By default, call_command should not trigger the check framework, unless + specifically asked. 
+ """ + self.counter = 0 + + def patched_check(self_, **kwargs): + self.counter += 1 + self.kwargs = kwargs + + saved_check = BaseCommand.check + BaseCommand.check = patched_check + try: + management.call_command("dance", verbosity=0) + self.assertEqual(self.counter, 0) + management.call_command("dance", verbosity=0, skip_checks=False) + self.assertEqual(self.counter, 1) + self.assertEqual(self.kwargs, {}) + finally: + BaseCommand.check = saved_check + + def test_requires_system_checks_empty(self): + with mock.patch( + "django.core.management.base.BaseCommand.check" + ) as mocked_check: + management.call_command("no_system_checks") + self.assertIs(mocked_check.called, False) + + def test_requires_system_checks_specific(self): + with mock.patch( + "django.core.management.base.BaseCommand.check" + ) as mocked_check: + management.call_command("specific_system_checks", skip_checks=False) + mocked_check.assert_called_once_with(tags=[Tags.staticfiles, Tags.models]) + + def test_requires_system_checks_invalid(self): + class Command(BaseCommand): + requires_system_checks = "x" + + msg = "requires_system_checks must be a list or tuple." + with self.assertRaisesMessage(TypeError, msg): + Command() + + def test_check_migrations(self): + requires_migrations_checks = dance.Command.requires_migrations_checks + self.assertIs(requires_migrations_checks, False) + try: + with mock.patch.object(BaseCommand, "check_migrations") as check_migrations: + management.call_command("dance", verbosity=0) + self.assertFalse(check_migrations.called) + dance.Command.requires_migrations_checks = True + management.call_command("dance", verbosity=0) + self.assertTrue(check_migrations.called) + finally: + dance.Command.requires_migrations_checks = requires_migrations_checks + + def test_call_command_unrecognized_option(self): + msg = ( + "Unknown option(s) for dance command: unrecognized. 
Valid options " + "are: example, force_color, help, integer, no_color, opt_3, " + "option3, pythonpath, settings, skip_checks, stderr, stdout, " + "style, traceback, verbosity, version." + ) + with self.assertRaisesMessage(TypeError, msg): + management.call_command("dance", unrecognized=1) + + msg = ( + "Unknown option(s) for dance command: unrecognized, unrecognized2. " + "Valid options are: example, force_color, help, integer, no_color, " + "opt_3, option3, pythonpath, settings, skip_checks, stderr, " + "stdout, style, traceback, verbosity, version." + ) + with self.assertRaisesMessage(TypeError, msg): + management.call_command("dance", unrecognized=1, unrecognized2=1) + + def test_call_command_with_required_parameters_in_options(self): + out = StringIO() + management.call_command( + "required_option", need_me="foo", needme2="bar", stdout=out + ) + self.assertIn("need_me", out.getvalue()) + self.assertIn("needme2", out.getvalue()) + + def test_call_command_with_required_parameters_in_mixed_options(self): + out = StringIO() + management.call_command( + "required_option", "--need-me=foo", needme2="bar", stdout=out + ) + self.assertIn("need_me", out.getvalue()) + self.assertIn("needme2", out.getvalue()) + + def test_command_add_arguments_after_common_arguments(self): + out = StringIO() + management.call_command("common_args", stdout=out) + self.assertIn("Detected that --version already exists", out.getvalue()) + + def test_mutually_exclusive_group_required_options(self): + out = StringIO() + management.call_command("mutually_exclusive_required", foo_id=1, stdout=out) + self.assertIn("foo_id", out.getvalue()) + management.call_command( + "mutually_exclusive_required", foo_name="foo", stdout=out + ) + self.assertIn("foo_name", out.getvalue()) + msg = ( + "Error: one of the arguments --foo-id --foo-name --foo-list " + "--append_const --const --count --flag_false --flag_true is " + "required" + ) + with self.assertRaisesMessage(CommandError, msg): + 
management.call_command("mutually_exclusive_required", stdout=out) + + def test_mutually_exclusive_group_required_const_options(self): + tests = [ + ("append_const", [42]), + ("const", 31), + ("count", 1), + ("flag_false", False), + ("flag_true", True), + ] + for arg, value in tests: + out = StringIO() + expected_output = "%s=%s" % (arg, value) + with self.subTest(arg=arg): + management.call_command( + "mutually_exclusive_required", + "--%s" % arg, + stdout=out, + ) + self.assertIn(expected_output, out.getvalue()) + out.truncate(0) + management.call_command( + "mutually_exclusive_required", + **{arg: value, "stdout": out}, + ) + self.assertIn(expected_output, out.getvalue()) + + def test_mutually_exclusive_group_required_with_same_dest_options(self): + tests = [ + {"until": "2"}, + {"for": "1", "until": "2"}, + ] + msg = ( + "Cannot pass the dest 'until' that matches multiple arguments via " + "**options." + ) + for options in tests: + with self.subTest(options=options): + with self.assertRaisesMessage(TypeError, msg): + management.call_command( + "mutually_exclusive_required_with_same_dest", + **options, + ) + + def test_mutually_exclusive_group_required_with_same_dest_args(self): + tests = [ + ("--until=1",), + ("--until", 1), + ("--for=1",), + ("--for", 1), + ] + for args in tests: + out = StringIO() + with self.subTest(options=args): + management.call_command( + "mutually_exclusive_required_with_same_dest", + *args, + stdout=out, + ) + output = out.getvalue() + self.assertIn("until=1", output) + + def test_required_list_option(self): + tests = [ + (("--foo-list", [1, 2]), {}), + ((), {"foo_list": [1, 2]}), + ] + for command in ["mutually_exclusive_required", "required_list_option"]: + for args, kwargs in tests: + with self.subTest(command=command, args=args, kwargs=kwargs): + out = StringIO() + management.call_command( + command, + *args, + **{**kwargs, "stdout": out}, + ) + self.assertIn("foo_list=[1, 2]", out.getvalue()) + + def 
test_required_const_options(self): + args = { + "append_const": [42], + "const": 31, + "count": 1, + "flag_false": False, + "flag_true": True, + } + expected_output = "\n".join( + "%s=%s" % (arg, value) for arg, value in args.items() + ) + out = StringIO() + management.call_command( + "required_constant_option", + "--append_const", + "--const", + "--count", + "--flag_false", + "--flag_true", + stdout=out, + ) + self.assertIn(expected_output, out.getvalue()) + out.truncate(0) + management.call_command("required_constant_option", **{**args, "stdout": out}) + self.assertIn(expected_output, out.getvalue()) + + def test_subparser(self): + out = StringIO() + management.call_command("subparser", "foo", 12, stdout=out) + self.assertIn("bar", out.getvalue()) + + def test_subparser_dest_args(self): + out = StringIO() + management.call_command("subparser_dest", "foo", bar=12, stdout=out) + self.assertIn("bar", out.getvalue()) + + def test_subparser_dest_required_args(self): + out = StringIO() + management.call_command( + "subparser_required", "foo_1", "foo_2", bar=12, stdout=out + ) + self.assertIn("bar", out.getvalue()) + + def test_subparser_invalid_option(self): + msg = "invalid choice: 'test' (choose from 'foo')" + with self.assertRaisesMessage(CommandError, msg): + management.call_command("subparser", "test", 12) + msg = "Error: the following arguments are required: subcommand" + with self.assertRaisesMessage(CommandError, msg): + management.call_command("subparser_dest", subcommand="foo", bar=12) + + def test_create_parser_kwargs(self): + """BaseCommand.create_parser() passes kwargs to CommandParser.""" + epilog = "some epilog text" + parser = BaseCommand().create_parser( + "prog_name", + "subcommand", + epilog=epilog, + formatter_class=ArgumentDefaultsHelpFormatter, + ) + self.assertEqual(parser.epilog, epilog) + self.assertEqual(parser.formatter_class, ArgumentDefaultsHelpFormatter) + + def test_outputwrapper_flush(self): + out = StringIO() + with 
mock.patch.object(out, "flush") as mocked_flush: + management.call_command("outputwrapper", stdout=out) + self.assertIn("Working...", out.getvalue()) + self.assertIs(mocked_flush.called, True) + + +class CommandRunTests(AdminScriptTestCase): + """ + Tests that need to run by simulating the command line, not by call_command. + """ + + def test_script_prefix_set_in_commands(self): + self.write_settings( + "settings.py", + apps=["user_commands"], + sdict={ + "ROOT_URLCONF": '"user_commands.urls"', + "FORCE_SCRIPT_NAME": '"/PREFIX/"', + }, + ) + out, err = self.run_manage(["reverse_url"]) + self.assertNoOutput(err) + self.assertEqual(out.strip(), "/PREFIX/some/url/") + + def test_disallowed_abbreviated_options(self): + """ + To avoid conflicts with custom options, commands don't allow + abbreviated forms of the --setting and --pythonpath options. + """ + self.write_settings("settings.py", apps=["user_commands"]) + out, err = self.run_manage(["set_option", "--set", "foo"]) + self.assertNoOutput(err) + self.assertEqual(out.strip(), "Set foo") + + def test_skip_checks(self): + self.write_settings( + "settings.py", + apps=["django.contrib.staticfiles", "user_commands"], + sdict={ + # (staticfiles.E001) The STATICFILES_DIRS setting is not a tuple or + # list. 
+ "STATICFILES_DIRS": '"foo"', + }, + ) + out, err = self.run_manage(["set_option", "--skip-checks", "--set", "foo"]) + self.assertNoOutput(err) + self.assertEqual(out.strip(), "Set foo") + + def test_subparser_error_formatting(self): + self.write_settings("settings.py", apps=["user_commands"]) + out, err = self.run_manage(["subparser", "foo", "twelve"]) + self.maxDiff = None + self.assertNoOutput(out) + err_lines = err.splitlines() + self.assertEqual(len(err_lines), 2) + self.assertEqual( + err_lines[1], + "manage.py subparser foo: error: argument bar: invalid int value: 'twelve'", + ) + + def test_subparser_non_django_error_formatting(self): + self.write_settings("settings.py", apps=["user_commands"]) + out, err = self.run_manage(["subparser_vanilla", "foo", "seven"]) + self.assertNoOutput(out) + err_lines = err.splitlines() + self.assertEqual(len(err_lines), 2) + self.assertEqual( + err_lines[1], + "manage.py subparser_vanilla foo: error: argument bar: invalid int value: " + "'seven'", + ) + + +class UtilsTests(SimpleTestCase): + def test_no_existent_external_program(self): + msg = "Error executing a_42_command_that_doesnt_exist_42" + with self.assertRaisesMessage(CommandError, msg): + popen_wrapper(["a_42_command_that_doesnt_exist_42"]) + + def test_get_random_secret_key(self): + key = get_random_secret_key() + self.assertEqual(len(key), 50) + for char in key: + self.assertIn(char, "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)") + + def test_is_ignored_path_true(self): + patterns = ( + ["foo/bar/baz"], + ["baz"], + ["foo/bar/baz"], + ["*/baz"], + ["*"], + ["b?z"], + ["[abc]az"], + ["*/ba[!z]/baz"], + ) + for ignore_patterns in patterns: + with self.subTest(ignore_patterns=ignore_patterns): + self.assertIs( + is_ignored_path("foo/bar/baz", ignore_patterns=ignore_patterns), + True, + ) + + def test_is_ignored_path_false(self): + self.assertIs( + is_ignored_path( + "foo/bar/baz", ignore_patterns=["foo/bar/bat", "bar", "flub/blub"] + ), + False, + ) + + def 
test_normalize_path_patterns_truncates_wildcard_base(self): + expected = [os.path.normcase(p) for p in ["foo/bar", "bar/*/"]] + self.assertEqual(normalize_path_patterns(["foo/bar/*", "bar/*/"]), expected) diff --git a/testbed/django__django/tests/user_commands/urls.py b/testbed/django__django/tests/user_commands/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..6e82c7dcb3b9bcd4c50775ac9b901377db47072d --- /dev/null +++ b/testbed/django__django/tests/user_commands/urls.py @@ -0,0 +1,5 @@ +from django.urls import path + +urlpatterns = [ + path("some/url/", lambda req: req, name="some_url"), +] diff --git a/testbed/django__django/tests/utils_tests/__init__.py b/testbed/django__django/tests/utils_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/utils_tests/deconstructible_classes.py b/testbed/django__django/tests/utils_tests/deconstructible_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..d1fb8fffc1206227672209e8d0335ae823cac56f --- /dev/null +++ b/testbed/django__django/tests/utils_tests/deconstructible_classes.py @@ -0,0 +1 @@ +from .test_deconstruct import DeconstructibleWithPathClass # NOQA diff --git a/testbed/django__django/tests/utils_tests/eggs/test_egg.egg b/testbed/django__django/tests/utils_tests/eggs/test_egg.egg new file mode 100644 index 0000000000000000000000000000000000000000..9b08cc10eff413ec883186fdba4eb7b7707fff10 Binary files /dev/null and b/testbed/django__django/tests/utils_tests/eggs/test_egg.egg differ diff --git a/testbed/django__django/tests/utils_tests/files/strip_tags1.html b/testbed/django__django/tests/utils_tests/files/strip_tags1.html new file mode 100644 index 0000000000000000000000000000000000000000..476fc052aa1526fd7d9eee133a7a7d80d54fccef --- /dev/null +++ b/testbed/django__django/tests/utils_tests/files/strip_tags1.html @@ -0,0 +1,1302 @@ + + + + + + 
Improved regex in strip_tags · d7504a3 · django/django + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    + + + + + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + +
    + + + +
    + + Test string that has not been stripped. + +
    + + + + + + + + diff --git a/testbed/django__django/tests/utils_tests/files/strip_tags2.txt b/testbed/django__django/tests/utils_tests/files/strip_tags2.txt new file mode 100644 index 0000000000000000000000000000000000000000..2cbc6c5610a15ec1e3657140a30382d2fb44d30f --- /dev/null +++ b/testbed/django__django/tests/utils_tests/files/strip_tags2.txt @@ -0,0 +1,39 @@ +_**Lorem:** ipsum dolor sit [amet](https://example.com), consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et [dolore magna aliqua](https://example.com)._ + +_Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea [commodo consequat](https://github.com)._ +_____________________________________________________________ +Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. + +Volutpat +-------- +Volutpat est `velit` egestas (*egestas*) dui id ornare arcu odio ut. [*Non*](https://example.com) sodales neque `sodales`'s ut `etiam()`. + +1. Dignissim enim sit amet venenatis urna. +2. Scelerisque fermentum dui faucibus in ornare quam viverra orci. + + +Tristique +--------- + +
    ![image](https://placekitten.com/200/200)
    + +Tristique magna sit amet purus gravida quis blandit turpis. +**Note:** Rhoncus urna neque viverra justo nec. + +### Fermentum dui faucibus + +Fermentum dui faucibus in ornare quam viverra orci: + +1. Sed vulputate mi sit amet mauris commodo quis imperdiet. +2. Quisque non tellus orci ac. +3. Neque volutpat ac tincidunt vitae semper quis lectus. + +#### Aliquet eget sit amet tellus cras adipiscing enim eu. +##### Sodales ut etiam sit amet nisl purus in mollis nunc. +Interdum velit euismod in pellentesque massa placerat. Iaculis urna id volutpat lacus. Non consectetur a erat nam at lectus urna. Arcu non odio euismod lacinia at quis risus. Et pharetra pharetra massa massa ultricies mi. + +> [Diam quis enim lobortis scelerisque.](https://example.com) + + + +Test string that has not been stripped. diff --git a/testbed/django__django/tests/utils_tests/models.py b/testbed/django__django/tests/utils_tests/models.py new file mode 100644 index 0000000000000000000000000000000000000000..866a37debc4418f2f455a02f0065f281ad385a8b --- /dev/null +++ b/testbed/django__django/tests/utils_tests/models.py @@ -0,0 +1,9 @@ +from django.db import models + + +class Category(models.Model): + name = models.CharField(max_length=100) + + +class CategoryInfo(models.Model): + category = models.OneToOneField(Category, models.CASCADE) diff --git a/testbed/django__django/tests/utils_tests/test_archive.py b/testbed/django__django/tests/utils_tests/test_archive.py new file mode 100644 index 0000000000000000000000000000000000000000..8cd107063f9c74b34ef49ff9d61caaa66c691590 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_archive.py @@ -0,0 +1,98 @@ +import os +import stat +import sys +import tempfile +import unittest + +from django.core.exceptions import SuspiciousOperation +from django.test import SimpleTestCase +from django.utils import archive + +try: + import bz2 # NOQA + + HAS_BZ2 = True +except ImportError: + HAS_BZ2 = False + +try: + import lzma # NOQA + + HAS_LZMA = 
True +except ImportError: + HAS_LZMA = False + + +class TestArchive(unittest.TestCase): + def setUp(self): + self.testdir = os.path.join(os.path.dirname(__file__), "archives") + self.old_cwd = os.getcwd() + os.chdir(self.testdir) + + def tearDown(self): + os.chdir(self.old_cwd) + + def test_extract_function(self): + with os.scandir(self.testdir) as entries: + for entry in entries: + with self.subTest(entry.name), tempfile.TemporaryDirectory() as tmpdir: + if (entry.name.endswith(".bz2") and not HAS_BZ2) or ( + entry.name.endswith((".lzma", ".xz")) and not HAS_LZMA + ): + continue + archive.extract(entry.path, tmpdir) + self.assertTrue(os.path.isfile(os.path.join(tmpdir, "1"))) + self.assertTrue(os.path.isfile(os.path.join(tmpdir, "2"))) + self.assertTrue(os.path.isfile(os.path.join(tmpdir, "foo", "1"))) + self.assertTrue(os.path.isfile(os.path.join(tmpdir, "foo", "2"))) + self.assertTrue( + os.path.isfile(os.path.join(tmpdir, "foo", "bar", "1")) + ) + self.assertTrue( + os.path.isfile(os.path.join(tmpdir, "foo", "bar", "2")) + ) + + @unittest.skipIf( + sys.platform == "win32", "Python on Windows has a limited os.chmod()." + ) + def test_extract_file_permissions(self): + """archive.extract() preserves file permissions.""" + mask = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO + umask = os.umask(0) + os.umask(umask) # Restore the original umask. + with os.scandir(self.testdir) as entries: + for entry in entries: + if ( + entry.name.startswith("leadpath_") + or (entry.name.endswith(".bz2") and not HAS_BZ2) + or (entry.name.endswith((".lzma", ".xz")) and not HAS_LZMA) + ): + continue + with self.subTest(entry.name), tempfile.TemporaryDirectory() as tmpdir: + archive.extract(entry.path, tmpdir) + # An executable file in the archive has executable + # permissions. + filepath = os.path.join(tmpdir, "executable") + self.assertEqual(os.stat(filepath).st_mode & mask, 0o775) + # A file is readable even if permission data is missing. 
+ filepath = os.path.join(tmpdir, "no_permissions") + self.assertEqual(os.stat(filepath).st_mode & mask, 0o666 & ~umask) + + +class TestArchiveInvalid(SimpleTestCase): + def test_extract_function_traversal(self): + archives_dir = os.path.join(os.path.dirname(__file__), "traversal_archives") + tests = [ + ("traversal.tar", ".."), + ("traversal_absolute.tar", "/tmp/evil.py"), + ] + if sys.platform == "win32": + tests += [ + ("traversal_disk_win.tar", "d:evil.py"), + ("traversal_disk_win.zip", "d:evil.py"), + ] + msg = "Archive contains invalid path: '%s'" + for entry, invalid_path in tests: + with self.subTest(entry), tempfile.TemporaryDirectory() as tmpdir: + with self.assertRaisesMessage(SuspiciousOperation, msg % invalid_path): + archive.extract(os.path.join(archives_dir, entry), tmpdir) diff --git a/testbed/django__django/tests/utils_tests/test_autoreload.py b/testbed/django__django/tests/utils_tests/test_autoreload.py new file mode 100644 index 0000000000000000000000000000000000000000..e33276ba6121ce2ea024713fc13f5280169dd986 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_autoreload.py @@ -0,0 +1,884 @@ +import contextlib +import os +import py_compile +import shutil +import sys +import tempfile +import threading +import time +import types +import weakref +import zipfile +import zoneinfo +from importlib import import_module +from pathlib import Path +from subprocess import CompletedProcess +from unittest import mock, skip, skipIf + +import django.__main__ +from django.apps.registry import Apps +from django.test import SimpleTestCase +from django.test.utils import extend_sys_path +from django.utils import autoreload +from django.utils.autoreload import WatchmanUnavailable + +from .test_module import __main__ as test_main +from .test_module import main_module as test_main_module +from .utils import on_macos_with_hfs + + +class TestIterModulesAndFiles(SimpleTestCase): + def import_and_cleanup(self, name): + import_module(name) + 
self.addCleanup(lambda: sys.path_importer_cache.clear()) + self.addCleanup(lambda: sys.modules.pop(name, None)) + + def clear_autoreload_caches(self): + autoreload.iter_modules_and_files.cache_clear() + + def assertFileFound(self, filename): + # Some temp directories are symlinks. Python resolves these fully while + # importing. + resolved_filename = filename.resolve(strict=True) + self.clear_autoreload_caches() + # Test uncached access + self.assertIn( + resolved_filename, list(autoreload.iter_all_python_module_files()) + ) + # Test cached access + self.assertIn( + resolved_filename, list(autoreload.iter_all_python_module_files()) + ) + self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) + + def assertFileNotFound(self, filename): + resolved_filename = filename.resolve(strict=True) + self.clear_autoreload_caches() + # Test uncached access + self.assertNotIn( + resolved_filename, list(autoreload.iter_all_python_module_files()) + ) + # Test cached access + self.assertNotIn( + resolved_filename, list(autoreload.iter_all_python_module_files()) + ) + self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) + + def temporary_file(self, filename): + dirname = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, dirname) + return Path(dirname) / filename + + def test_paths_are_pathlib_instances(self): + for filename in autoreload.iter_all_python_module_files(): + self.assertIsInstance(filename, Path) + + def test_file_added(self): + """ + When a file is added, it's returned by iter_all_python_module_files(). + """ + filename = self.temporary_file("test_deleted_removed_module.py") + filename.touch() + + with extend_sys_path(str(filename.parent)): + self.import_and_cleanup("test_deleted_removed_module") + + self.assertFileFound(filename.absolute()) + + def test_check_errors(self): + """ + When a file containing an error is imported in a function wrapped by + check_errors(), gen_filenames() returns it. 
+ """ + filename = self.temporary_file("test_syntax_error.py") + filename.write_text("Ceci n'est pas du Python.") + + with extend_sys_path(str(filename.parent)): + try: + with self.assertRaises(SyntaxError): + autoreload.check_errors(import_module)("test_syntax_error") + finally: + autoreload._exception = None + self.assertFileFound(filename) + + def test_check_errors_catches_all_exceptions(self): + """ + Since Python may raise arbitrary exceptions when importing code, + check_errors() must catch Exception, not just some subclasses. + """ + filename = self.temporary_file("test_exception.py") + filename.write_text("raise Exception") + with extend_sys_path(str(filename.parent)): + try: + with self.assertRaises(Exception): + autoreload.check_errors(import_module)("test_exception") + finally: + autoreload._exception = None + self.assertFileFound(filename) + + def test_zip_reload(self): + """ + Modules imported from zipped files have their archive location included + in the result. + """ + zip_file = self.temporary_file("zip_import.zip") + with zipfile.ZipFile(str(zip_file), "w", zipfile.ZIP_DEFLATED) as zipf: + zipf.writestr("test_zipped_file.py", "") + + with extend_sys_path(str(zip_file)): + self.import_and_cleanup("test_zipped_file") + self.assertFileFound(zip_file) + + def test_bytecode_conversion_to_source(self): + """.pyc and .pyo files are included in the files list.""" + filename = self.temporary_file("test_compiled.py") + filename.touch() + compiled_file = Path( + py_compile.compile(str(filename), str(filename.with_suffix(".pyc"))) + ) + filename.unlink() + with extend_sys_path(str(compiled_file.parent)): + self.import_and_cleanup("test_compiled") + self.assertFileFound(compiled_file) + + def test_weakref_in_sys_module(self): + """iter_all_python_module_file() ignores weakref modules.""" + time_proxy = weakref.proxy(time) + sys.modules["time_proxy"] = time_proxy + self.addCleanup(lambda: sys.modules.pop("time_proxy", None)) + 
list(autoreload.iter_all_python_module_files()) # No crash. + + def test_module_without_spec(self): + module = types.ModuleType("test_module") + del module.__spec__ + self.assertEqual( + autoreload.iter_modules_and_files((module,), frozenset()), frozenset() + ) + + def test_main_module_is_resolved(self): + main_module = sys.modules["__main__"] + self.assertFileFound(Path(main_module.__file__)) + + def test_main_module_without_file_is_not_resolved(self): + fake_main = types.ModuleType("__main__") + self.assertEqual( + autoreload.iter_modules_and_files((fake_main,), frozenset()), frozenset() + ) + + def test_path_with_embedded_null_bytes(self): + for path in ( + "embedded_null_byte\x00.py", + "di\x00rectory/embedded_null_byte.py", + ): + with self.subTest(path=path): + self.assertEqual( + autoreload.iter_modules_and_files((), frozenset([path])), + frozenset(), + ) + + +class TestChildArguments(SimpleTestCase): + @mock.patch.dict(sys.modules, {"__main__": django.__main__}) + @mock.patch("sys.argv", [django.__main__.__file__, "runserver"]) + @mock.patch("sys.warnoptions", []) + @mock.patch("sys._xoptions", {}) + def test_run_as_module(self): + self.assertEqual( + autoreload.get_child_arguments(), + [sys.executable, "-m", "django", "runserver"], + ) + + @mock.patch.dict(sys.modules, {"__main__": test_main}) + @mock.patch("sys.argv", [test_main.__file__, "runserver"]) + @mock.patch("sys.warnoptions", []) + @mock.patch("sys._xoptions", {}) + def test_run_as_non_django_module(self): + self.assertEqual( + autoreload.get_child_arguments(), + [sys.executable, "-m", "utils_tests.test_module", "runserver"], + ) + + @mock.patch.dict(sys.modules, {"__main__": test_main_module}) + @mock.patch("sys.argv", [test_main.__file__, "runserver"]) + @mock.patch("sys.warnoptions", []) + @mock.patch("sys._xoptions", {}) + def test_run_as_non_django_module_non_package(self): + self.assertEqual( + autoreload.get_child_arguments(), + [sys.executable, "-m", "utils_tests.test_module.main_module", 
"runserver"], + ) + + @mock.patch("__main__.__spec__", None) + @mock.patch("sys.argv", [__file__, "runserver"]) + @mock.patch("sys.warnoptions", ["error"]) + @mock.patch("sys._xoptions", {}) + def test_warnoptions(self): + self.assertEqual( + autoreload.get_child_arguments(), + [sys.executable, "-Werror", __file__, "runserver"], + ) + + @mock.patch("sys.argv", [__file__, "runserver"]) + @mock.patch("sys.warnoptions", []) + @mock.patch("sys._xoptions", {"utf8": True, "a": "b"}) + def test_xoptions(self): + self.assertEqual( + autoreload.get_child_arguments(), + [sys.executable, "-Xutf8", "-Xa=b", __file__, "runserver"], + ) + + @mock.patch("__main__.__spec__", None) + @mock.patch("sys.warnoptions", []) + def test_exe_fallback(self): + with tempfile.TemporaryDirectory() as tmpdir: + exe_path = Path(tmpdir) / "django-admin.exe" + exe_path.touch() + with mock.patch("sys.argv", [exe_path.with_suffix(""), "runserver"]): + self.assertEqual( + autoreload.get_child_arguments(), [exe_path, "runserver"] + ) + + @mock.patch("__main__.__spec__", None) + @mock.patch("sys.warnoptions", []) + @mock.patch("sys._xoptions", {}) + def test_entrypoint_fallback(self): + with tempfile.TemporaryDirectory() as tmpdir: + script_path = Path(tmpdir) / "django-admin-script.py" + script_path.touch() + with mock.patch( + "sys.argv", [script_path.with_name("django-admin"), "runserver"] + ): + self.assertEqual( + autoreload.get_child_arguments(), + [sys.executable, script_path, "runserver"], + ) + + @mock.patch("__main__.__spec__", None) + @mock.patch("sys.argv", ["does-not-exist", "runserver"]) + @mock.patch("sys.warnoptions", []) + def test_raises_runtimeerror(self): + msg = "Script does-not-exist does not exist." 
+ with self.assertRaisesMessage(RuntimeError, msg): + autoreload.get_child_arguments() + + @mock.patch("sys.argv", [__file__, "runserver"]) + @mock.patch("sys.warnoptions", []) + @mock.patch("sys._xoptions", {}) + def test_module_no_spec(self): + module = types.ModuleType("test_module") + del module.__spec__ + with mock.patch.dict(sys.modules, {"__main__": module}): + self.assertEqual( + autoreload.get_child_arguments(), + [sys.executable, __file__, "runserver"], + ) + + +class TestUtilities(SimpleTestCase): + def test_is_django_module(self): + for module, expected in ((zoneinfo, False), (sys, False), (autoreload, True)): + with self.subTest(module=module): + self.assertIs(autoreload.is_django_module(module), expected) + + def test_is_django_path(self): + for module, expected in ( + (zoneinfo.__file__, False), + (contextlib.__file__, False), + (autoreload.__file__, True), + ): + with self.subTest(module=module): + self.assertIs(autoreload.is_django_path(module), expected) + + +class TestCommonRoots(SimpleTestCase): + def test_common_roots(self): + paths = ( + Path("/first/second"), + Path("/first/second/third"), + Path("/first/"), + Path("/root/first/"), + ) + results = autoreload.common_roots(paths) + self.assertCountEqual(results, [Path("/first/"), Path("/root/first/")]) + + +class TestSysPathDirectories(SimpleTestCase): + def setUp(self): + self._directory = tempfile.TemporaryDirectory() + self.directory = Path(self._directory.name).resolve(strict=True).absolute() + self.file = self.directory / "test" + self.file.touch() + + def tearDown(self): + self._directory.cleanup() + + def test_sys_paths_with_directories(self): + with extend_sys_path(str(self.file)): + paths = list(autoreload.sys_path_directories()) + self.assertIn(self.file.parent, paths) + + def test_sys_paths_non_existing(self): + nonexistent_file = Path(self.directory.name) / "does_not_exist" + with extend_sys_path(str(nonexistent_file)): + paths = list(autoreload.sys_path_directories()) + 
self.assertNotIn(nonexistent_file, paths) + self.assertNotIn(nonexistent_file.parent, paths) + + def test_sys_paths_absolute(self): + paths = list(autoreload.sys_path_directories()) + self.assertTrue(all(p.is_absolute() for p in paths)) + + def test_sys_paths_directories(self): + with extend_sys_path(str(self.directory)): + paths = list(autoreload.sys_path_directories()) + self.assertIn(self.directory, paths) + + +class GetReloaderTests(SimpleTestCase): + @mock.patch("django.utils.autoreload.WatchmanReloader") + def test_watchman_unavailable(self, mocked_watchman): + mocked_watchman.check_availability.side_effect = WatchmanUnavailable + self.assertIsInstance(autoreload.get_reloader(), autoreload.StatReloader) + + @mock.patch.object(autoreload.WatchmanReloader, "check_availability") + def test_watchman_available(self, mocked_available): + # If WatchmanUnavailable isn't raised, Watchman will be chosen. + mocked_available.return_value = None + result = autoreload.get_reloader() + self.assertIsInstance(result, autoreload.WatchmanReloader) + + +class RunWithReloaderTests(SimpleTestCase): + @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: "true"}) + @mock.patch("django.utils.autoreload.get_reloader") + def test_swallows_keyboard_interrupt(self, mocked_get_reloader): + mocked_get_reloader.side_effect = KeyboardInterrupt() + autoreload.run_with_reloader(lambda: None) # No exception + + @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: "false"}) + @mock.patch("django.utils.autoreload.restart_with_reloader") + def test_calls_sys_exit(self, mocked_restart_reloader): + mocked_restart_reloader.return_value = 1 + with self.assertRaises(SystemExit) as exc: + autoreload.run_with_reloader(lambda: None) + self.assertEqual(exc.exception.code, 1) + + @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: "true"}) + @mock.patch("django.utils.autoreload.start_django") + @mock.patch("django.utils.autoreload.get_reloader") + def 
test_calls_start_django(self, mocked_reloader, mocked_start_django): + mocked_reloader.return_value = mock.sentinel.RELOADER + autoreload.run_with_reloader(mock.sentinel.METHOD) + self.assertEqual(mocked_start_django.call_count, 1) + self.assertSequenceEqual( + mocked_start_django.call_args[0], + [mock.sentinel.RELOADER, mock.sentinel.METHOD], + ) + + +class StartDjangoTests(SimpleTestCase): + @mock.patch("django.utils.autoreload.ensure_echo_on") + def test_echo_on_called(self, mocked_echo): + fake_reloader = mock.MagicMock() + autoreload.start_django(fake_reloader, lambda: None) + self.assertEqual(mocked_echo.call_count, 1) + + @mock.patch("django.utils.autoreload.check_errors") + def test_check_errors_called(self, mocked_check_errors): + fake_method = mock.MagicMock(return_value=None) + fake_reloader = mock.MagicMock() + autoreload.start_django(fake_reloader, fake_method) + self.assertCountEqual(mocked_check_errors.call_args[0], [fake_method]) + + @mock.patch("threading.Thread") + @mock.patch("django.utils.autoreload.check_errors") + def test_starts_thread_with_args(self, mocked_check_errors, mocked_thread): + fake_reloader = mock.MagicMock() + fake_main_func = mock.MagicMock() + fake_thread = mock.MagicMock() + mocked_check_errors.return_value = fake_main_func + mocked_thread.return_value = fake_thread + autoreload.start_django(fake_reloader, fake_main_func, 123, abc=123) + self.assertEqual(mocked_thread.call_count, 1) + self.assertEqual( + mocked_thread.call_args[1], + { + "target": fake_main_func, + "args": (123,), + "kwargs": {"abc": 123}, + "name": "django-main-thread", + }, + ) + self.assertIs(fake_thread.daemon, True) + self.assertTrue(fake_thread.start.called) + + +class TestCheckErrors(SimpleTestCase): + def test_mutates_error_files(self): + fake_method = mock.MagicMock(side_effect=RuntimeError()) + wrapped = autoreload.check_errors(fake_method) + with mock.patch.object(autoreload, "_error_files") as mocked_error_files: + try: + with 
self.assertRaises(RuntimeError): + wrapped() + finally: + autoreload._exception = None + self.assertEqual(mocked_error_files.append.call_count, 1) + + +class TestRaiseLastException(SimpleTestCase): + @mock.patch("django.utils.autoreload._exception", None) + def test_no_exception(self): + # Should raise no exception if _exception is None + autoreload.raise_last_exception() + + def test_raises_exception(self): + class MyException(Exception): + pass + + # Create an exception + try: + raise MyException("Test Message") + except MyException: + exc_info = sys.exc_info() + + with mock.patch("django.utils.autoreload._exception", exc_info): + with self.assertRaisesMessage(MyException, "Test Message"): + autoreload.raise_last_exception() + + def test_raises_custom_exception(self): + class MyException(Exception): + def __init__(self, msg, extra_context): + super().__init__(msg) + self.extra_context = extra_context + + # Create an exception. + try: + raise MyException("Test Message", "extra context") + except MyException: + exc_info = sys.exc_info() + + with mock.patch("django.utils.autoreload._exception", exc_info): + with self.assertRaisesMessage(MyException, "Test Message"): + autoreload.raise_last_exception() + + def test_raises_exception_with_context(self): + try: + raise Exception(2) + except Exception as e: + try: + raise Exception(1) from e + except Exception: + exc_info = sys.exc_info() + + with mock.patch("django.utils.autoreload._exception", exc_info): + with self.assertRaises(Exception) as cm: + autoreload.raise_last_exception() + self.assertEqual(cm.exception.args[0], 1) + self.assertEqual(cm.exception.__cause__.args[0], 2) + + +class RestartWithReloaderTests(SimpleTestCase): + executable = "/usr/bin/python" + + def patch_autoreload(self, argv): + patch_call = mock.patch( + "django.utils.autoreload.subprocess.run", + return_value=CompletedProcess(argv, 0), + ) + patches = [ + mock.patch("django.utils.autoreload.sys.argv", argv), + 
mock.patch("django.utils.autoreload.sys.executable", self.executable), + mock.patch("django.utils.autoreload.sys.warnoptions", ["all"]), + mock.patch("django.utils.autoreload.sys._xoptions", {}), + ] + for p in patches: + p.start() + self.addCleanup(p.stop) + mock_call = patch_call.start() + self.addCleanup(patch_call.stop) + return mock_call + + def test_manage_py(self): + with tempfile.TemporaryDirectory() as temp_dir: + script = Path(temp_dir) / "manage.py" + script.touch() + argv = [str(script), "runserver"] + mock_call = self.patch_autoreload(argv) + with mock.patch("__main__.__spec__", None): + autoreload.restart_with_reloader() + self.assertEqual(mock_call.call_count, 1) + self.assertEqual( + mock_call.call_args[0][0], + [self.executable, "-Wall"] + argv, + ) + + def test_python_m_django(self): + main = "/usr/lib/pythonX.Y/site-packages/django/__main__.py" + argv = [main, "runserver"] + mock_call = self.patch_autoreload(argv) + with mock.patch("django.__main__.__file__", main): + with mock.patch.dict(sys.modules, {"__main__": django.__main__}): + autoreload.restart_with_reloader() + self.assertEqual(mock_call.call_count, 1) + self.assertEqual( + mock_call.call_args[0][0], + [self.executable, "-Wall", "-m", "django"] + argv[1:], + ) + + +class ReloaderTests(SimpleTestCase): + RELOADER_CLS = None + + def setUp(self): + self._tempdir = tempfile.TemporaryDirectory() + self.tempdir = Path(self._tempdir.name).resolve(strict=True).absolute() + self.existing_file = self.ensure_file(self.tempdir / "test.py") + self.nonexistent_file = (self.tempdir / "does_not_exist.py").absolute() + self.reloader = self.RELOADER_CLS() + + def tearDown(self): + self._tempdir.cleanup() + self.reloader.stop() + + def ensure_file(self, path): + path.parent.mkdir(exist_ok=True, parents=True) + path.touch() + # On Linux and Windows updating the mtime of a file using touch() will + # set a timestamp value that is in the past, as the time value for the + # last kernel tick is used rather 
than getting the correct absolute + # time. + # To make testing simpler set the mtime to be the observed time when + # this function is called. + self.set_mtime(path, time.time()) + return path.absolute() + + def set_mtime(self, fp, value): + os.utime(str(fp), (value, value)) + + def increment_mtime(self, fp, by=1): + current_time = time.time() + self.set_mtime(fp, current_time + by) + + @contextlib.contextmanager + def tick_twice(self): + ticker = self.reloader.tick() + next(ticker) + yield + next(ticker) + + +class IntegrationTests: + @mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed") + @mock.patch( + "django.utils.autoreload.iter_all_python_module_files", return_value=frozenset() + ) + def test_glob(self, mocked_modules, notify_mock): + non_py_file = self.ensure_file(self.tempdir / "non_py_file") + self.reloader.watch_dir(self.tempdir, "*.py") + with self.tick_twice(): + self.increment_mtime(non_py_file) + self.increment_mtime(self.existing_file) + self.assertEqual(notify_mock.call_count, 1) + self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) + + @mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed") + @mock.patch( + "django.utils.autoreload.iter_all_python_module_files", return_value=frozenset() + ) + def test_multiple_globs(self, mocked_modules, notify_mock): + self.ensure_file(self.tempdir / "x.test") + self.reloader.watch_dir(self.tempdir, "*.py") + self.reloader.watch_dir(self.tempdir, "*.test") + with self.tick_twice(): + self.increment_mtime(self.existing_file) + self.assertEqual(notify_mock.call_count, 1) + self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) + + @mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed") + @mock.patch( + "django.utils.autoreload.iter_all_python_module_files", return_value=frozenset() + ) + def test_overlapping_globs(self, mocked_modules, notify_mock): + self.reloader.watch_dir(self.tempdir, "*.py") + 
self.reloader.watch_dir(self.tempdir, "*.p*") + with self.tick_twice(): + self.increment_mtime(self.existing_file) + self.assertEqual(notify_mock.call_count, 1) + self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) + + @mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed") + @mock.patch( + "django.utils.autoreload.iter_all_python_module_files", return_value=frozenset() + ) + def test_glob_recursive(self, mocked_modules, notify_mock): + non_py_file = self.ensure_file(self.tempdir / "dir" / "non_py_file") + py_file = self.ensure_file(self.tempdir / "dir" / "file.py") + self.reloader.watch_dir(self.tempdir, "**/*.py") + with self.tick_twice(): + self.increment_mtime(non_py_file) + self.increment_mtime(py_file) + self.assertEqual(notify_mock.call_count, 1) + self.assertCountEqual(notify_mock.call_args[0], [py_file]) + + @mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed") + @mock.patch( + "django.utils.autoreload.iter_all_python_module_files", return_value=frozenset() + ) + def test_multiple_recursive_globs(self, mocked_modules, notify_mock): + non_py_file = self.ensure_file(self.tempdir / "dir" / "test.txt") + py_file = self.ensure_file(self.tempdir / "dir" / "file.py") + self.reloader.watch_dir(self.tempdir, "**/*.txt") + self.reloader.watch_dir(self.tempdir, "**/*.py") + with self.tick_twice(): + self.increment_mtime(non_py_file) + self.increment_mtime(py_file) + self.assertEqual(notify_mock.call_count, 2) + self.assertCountEqual( + notify_mock.call_args_list, [mock.call(py_file), mock.call(non_py_file)] + ) + + @mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed") + @mock.patch( + "django.utils.autoreload.iter_all_python_module_files", return_value=frozenset() + ) + def test_nested_glob_recursive(self, mocked_modules, notify_mock): + inner_py_file = self.ensure_file(self.tempdir / "dir" / "file.py") + self.reloader.watch_dir(self.tempdir, "**/*.py") + 
self.reloader.watch_dir(inner_py_file.parent, "**/*.py") + with self.tick_twice(): + self.increment_mtime(inner_py_file) + self.assertEqual(notify_mock.call_count, 1) + self.assertCountEqual(notify_mock.call_args[0], [inner_py_file]) + + @mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed") + @mock.patch( + "django.utils.autoreload.iter_all_python_module_files", return_value=frozenset() + ) + def test_overlapping_glob_recursive(self, mocked_modules, notify_mock): + py_file = self.ensure_file(self.tempdir / "dir" / "file.py") + self.reloader.watch_dir(self.tempdir, "**/*.p*") + self.reloader.watch_dir(self.tempdir, "**/*.py*") + with self.tick_twice(): + self.increment_mtime(py_file) + self.assertEqual(notify_mock.call_count, 1) + self.assertCountEqual(notify_mock.call_args[0], [py_file]) + + +class BaseReloaderTests(ReloaderTests): + RELOADER_CLS = autoreload.BaseReloader + + def test_watch_dir_with_unresolvable_path(self): + path = Path("unresolvable_directory") + with mock.patch.object(Path, "absolute", side_effect=FileNotFoundError): + self.reloader.watch_dir(path, "**/*.mo") + self.assertEqual(list(self.reloader.directory_globs), []) + + def test_watch_with_glob(self): + self.reloader.watch_dir(self.tempdir, "*.py") + watched_files = list(self.reloader.watched_files()) + self.assertIn(self.existing_file, watched_files) + + def test_watch_files_with_recursive_glob(self): + inner_file = self.ensure_file(self.tempdir / "test" / "test.py") + self.reloader.watch_dir(self.tempdir, "**/*.py") + watched_files = list(self.reloader.watched_files()) + self.assertIn(self.existing_file, watched_files) + self.assertIn(inner_file, watched_files) + + def test_run_loop_catches_stopiteration(self): + def mocked_tick(): + yield + + with mock.patch.object(self.reloader, "tick", side_effect=mocked_tick) as tick: + self.reloader.run_loop() + self.assertEqual(tick.call_count, 1) + + def test_run_loop_stop_and_return(self): + def mocked_tick(*args): + yield + 
self.reloader.stop() + return # Raises StopIteration + + with mock.patch.object(self.reloader, "tick", side_effect=mocked_tick) as tick: + self.reloader.run_loop() + + self.assertEqual(tick.call_count, 1) + + def test_wait_for_apps_ready_checks_for_exception(self): + app_reg = Apps() + app_reg.ready_event.set() + # thread.is_alive() is False if it's not started. + dead_thread = threading.Thread() + self.assertFalse(self.reloader.wait_for_apps_ready(app_reg, dead_thread)) + + def test_wait_for_apps_ready_without_exception(self): + app_reg = Apps() + app_reg.ready_event.set() + thread = mock.MagicMock() + thread.is_alive.return_value = True + self.assertTrue(self.reloader.wait_for_apps_ready(app_reg, thread)) + + +def skip_unless_watchman_available(): + try: + autoreload.WatchmanReloader.check_availability() + except WatchmanUnavailable as e: + return skip("Watchman unavailable: %s" % e) + return lambda func: func + + +@skip_unless_watchman_available() +class WatchmanReloaderTests(ReloaderTests, IntegrationTests): + RELOADER_CLS = autoreload.WatchmanReloader + + def setUp(self): + super().setUp() + # Shorten the timeout to speed up tests. 
+ self.reloader.client_timeout = int(os.environ.get("DJANGO_WATCHMAN_TIMEOUT", 2)) + + def test_watch_glob_ignores_non_existing_directories_two_levels(self): + with mock.patch.object(self.reloader, "_subscribe") as mocked_subscribe: + self.reloader._watch_glob(self.tempdir / "does_not_exist" / "more", ["*"]) + self.assertFalse(mocked_subscribe.called) + + def test_watch_glob_uses_existing_parent_directories(self): + with mock.patch.object(self.reloader, "_subscribe") as mocked_subscribe: + self.reloader._watch_glob(self.tempdir / "does_not_exist", ["*"]) + self.assertSequenceEqual( + mocked_subscribe.call_args[0], + [ + self.tempdir, + "glob-parent-does_not_exist:%s" % self.tempdir, + ["anyof", ["match", "does_not_exist/*", "wholename"]], + ], + ) + + def test_watch_glob_multiple_patterns(self): + with mock.patch.object(self.reloader, "_subscribe") as mocked_subscribe: + self.reloader._watch_glob(self.tempdir, ["*", "*.py"]) + self.assertSequenceEqual( + mocked_subscribe.call_args[0], + [ + self.tempdir, + "glob:%s" % self.tempdir, + ["anyof", ["match", "*", "wholename"], ["match", "*.py", "wholename"]], + ], + ) + + def test_watched_roots_contains_files(self): + paths = self.reloader.watched_roots([self.existing_file]) + self.assertIn(self.existing_file.parent, paths) + + def test_watched_roots_contains_directory_globs(self): + self.reloader.watch_dir(self.tempdir, "*.py") + paths = self.reloader.watched_roots([]) + self.assertIn(self.tempdir, paths) + + def test_watched_roots_contains_sys_path(self): + with extend_sys_path(str(self.tempdir)): + paths = self.reloader.watched_roots([]) + self.assertIn(self.tempdir, paths) + + def test_check_server_status(self): + self.assertTrue(self.reloader.check_server_status()) + + def test_check_server_status_raises_error(self): + with mock.patch.object(self.reloader.client, "query") as mocked_query: + mocked_query.side_effect = Exception() + with self.assertRaises(autoreload.WatchmanUnavailable): + 
self.reloader.check_server_status() + + @mock.patch("pywatchman.client") + def test_check_availability(self, mocked_client): + mocked_client().capabilityCheck.side_effect = Exception() + with self.assertRaisesMessage( + WatchmanUnavailable, "Cannot connect to the watchman service" + ): + self.RELOADER_CLS.check_availability() + + @mock.patch("pywatchman.client") + def test_check_availability_lower_version(self, mocked_client): + mocked_client().capabilityCheck.return_value = {"version": "4.8.10"} + with self.assertRaisesMessage( + WatchmanUnavailable, "Watchman 4.9 or later is required." + ): + self.RELOADER_CLS.check_availability() + + def test_pywatchman_not_available(self): + with mock.patch.object(autoreload, "pywatchman") as mocked: + mocked.__bool__.return_value = False + with self.assertRaisesMessage( + WatchmanUnavailable, "pywatchman not installed." + ): + self.RELOADER_CLS.check_availability() + + def test_update_watches_raises_exceptions(self): + class TestException(Exception): + pass + + with mock.patch.object(self.reloader, "_update_watches") as mocked_watches: + with mock.patch.object( + self.reloader, "check_server_status" + ) as mocked_server_status: + mocked_watches.side_effect = TestException() + mocked_server_status.return_value = True + with self.assertRaises(TestException): + self.reloader.update_watches() + self.assertIsInstance( + mocked_server_status.call_args[0][0], TestException + ) + + @mock.patch.dict(os.environ, {"DJANGO_WATCHMAN_TIMEOUT": "10"}) + def test_setting_timeout_from_environment_variable(self): + self.assertEqual(self.RELOADER_CLS().client_timeout, 10) + + +@skipIf(on_macos_with_hfs(), "These tests do not work with HFS+ as a filesystem") +class StatReloaderTests(ReloaderTests, IntegrationTests): + RELOADER_CLS = autoreload.StatReloader + + def setUp(self): + super().setUp() + # Shorten the sleep time to speed up tests. 
+ self.reloader.SLEEP_TIME = 0.01 + + @mock.patch("django.utils.autoreload.StatReloader.notify_file_changed") + def test_tick_does_not_trigger_twice(self, mock_notify_file_changed): + with mock.patch.object( + self.reloader, "watched_files", return_value=[self.existing_file] + ): + ticker = self.reloader.tick() + next(ticker) + self.increment_mtime(self.existing_file) + next(ticker) + next(ticker) + self.assertEqual(mock_notify_file_changed.call_count, 1) + + def test_snapshot_files_ignores_missing_files(self): + with mock.patch.object( + self.reloader, "watched_files", return_value=[self.nonexistent_file] + ): + self.assertEqual(dict(self.reloader.snapshot_files()), {}) + + def test_snapshot_files_updates(self): + with mock.patch.object( + self.reloader, "watched_files", return_value=[self.existing_file] + ): + snapshot1 = dict(self.reloader.snapshot_files()) + self.assertIn(self.existing_file, snapshot1) + self.increment_mtime(self.existing_file) + snapshot2 = dict(self.reloader.snapshot_files()) + self.assertNotEqual( + snapshot1[self.existing_file], snapshot2[self.existing_file] + ) + + def test_snapshot_files_with_duplicates(self): + with mock.patch.object( + self.reloader, + "watched_files", + return_value=[self.existing_file, self.existing_file], + ): + snapshot = list(self.reloader.snapshot_files()) + self.assertEqual(len(snapshot), 1) + self.assertEqual(snapshot[0][0], self.existing_file) diff --git a/testbed/django__django/tests/utils_tests/test_connection.py b/testbed/django__django/tests/utils_tests/test_connection.py new file mode 100644 index 0000000000000000000000000000000000000000..e1e2bbb28c20527933be3c52a0a795cf174b88d9 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_connection.py @@ -0,0 +1,14 @@ +from django.test import SimpleTestCase +from django.utils.connection import BaseConnectionHandler + + +class BaseConnectionHandlerTests(SimpleTestCase): + def test_create_connection(self): + handler = BaseConnectionHandler() + msg = 
"Subclasses must implement create_connection()." + with self.assertRaisesMessage(NotImplementedError, msg): + handler.create_connection(None) + + def test_all_initialized_only(self): + handler = BaseConnectionHandler({"default": {}}) + self.assertEqual(handler.all(initialized_only=True), []) diff --git a/testbed/django__django/tests/utils_tests/test_crypto.py b/testbed/django__django/tests/utils_tests/test_crypto.py new file mode 100644 index 0000000000000000000000000000000000000000..ed3ed25a9de283ca1ee967211070187c8941ce12 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_crypto.py @@ -0,0 +1,196 @@ +import hashlib +import unittest + +from django.test import SimpleTestCase +from django.utils.crypto import ( + InvalidAlgorithm, + constant_time_compare, + pbkdf2, + salted_hmac, +) + + +class TestUtilsCryptoMisc(SimpleTestCase): + def test_constant_time_compare(self): + # It's hard to test for constant time, just test the result. + self.assertTrue(constant_time_compare(b"spam", b"spam")) + self.assertFalse(constant_time_compare(b"spam", b"eggs")) + self.assertTrue(constant_time_compare("spam", "spam")) + self.assertFalse(constant_time_compare("spam", "eggs")) + + def test_salted_hmac(self): + tests = [ + ((b"salt", b"value"), {}, "b51a2e619c43b1ca4f91d15c57455521d71d61eb"), + (("salt", "value"), {}, "b51a2e619c43b1ca4f91d15c57455521d71d61eb"), + ( + ("salt", "value"), + {"secret": "abcdefg"}, + "8bbee04ccddfa24772d1423a0ba43bd0c0e24b76", + ), + ( + ("salt", "value"), + {"secret": "x" * hashlib.sha1().block_size}, + "bd3749347b412b1b0a9ea65220e55767ac8e96b0", + ), + ( + ("salt", "value"), + {"algorithm": "sha256"}, + "ee0bf789e4e009371a5372c90f73fcf17695a8439c9108b0480f14e347b3f9ec", + ), + ( + ("salt", "value"), + { + "algorithm": "blake2b", + "secret": "x" * hashlib.blake2b().block_size, + }, + "fc6b9800a584d40732a07fa33fb69c35211269441823bca431a143853c32f" + "e836cf19ab881689528ede647dac412170cd5d3407b44c6d0f44630690c54" + "ad3d58", + ), + ] + for 
args, kwargs, digest in tests: + with self.subTest(args=args, kwargs=kwargs): + self.assertEqual(salted_hmac(*args, **kwargs).hexdigest(), digest) + + def test_invalid_algorithm(self): + msg = "'whatever' is not an algorithm accepted by the hashlib module." + with self.assertRaisesMessage(InvalidAlgorithm, msg): + salted_hmac("salt", "value", algorithm="whatever") + + +class TestUtilsCryptoPBKDF2(unittest.TestCase): + # https://tools.ietf.org/html/draft-josefsson-pbkdf2-test-vectors-06 + rfc_vectors = [ + { + "args": { + "password": "password", + "salt": "salt", + "iterations": 1, + "dklen": 20, + "digest": hashlib.sha1, + }, + "result": "0c60c80f961f0e71f3a9b524af6012062fe037a6", + }, + { + "args": { + "password": "password", + "salt": "salt", + "iterations": 2, + "dklen": 20, + "digest": hashlib.sha1, + }, + "result": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957", + }, + { + "args": { + "password": "password", + "salt": "salt", + "iterations": 4096, + "dklen": 20, + "digest": hashlib.sha1, + }, + "result": "4b007901b765489abead49d926f721d065a429c1", + }, + # # this takes way too long :( + # { + # "args": { + # "password": "password", + # "salt": "salt", + # "iterations": 16777216, + # "dklen": 20, + # "digest": hashlib.sha1, + # }, + # "result": "eefe3d61cd4da4e4e9945b3d6ba2158c2634e984", + # }, + { + "args": { + "password": "passwordPASSWORDpassword", + "salt": "saltSALTsaltSALTsaltSALTsaltSALTsalt", + "iterations": 4096, + "dklen": 25, + "digest": hashlib.sha1, + }, + "result": "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038", + }, + { + "args": { + "password": "pass\0word", + "salt": "sa\0lt", + "iterations": 4096, + "dklen": 16, + "digest": hashlib.sha1, + }, + "result": "56fa6aa75548099dcc37d7f03425e0c3", + }, + ] + + regression_vectors = [ + { + "args": { + "password": "password", + "salt": "salt", + "iterations": 1, + "dklen": 20, + "digest": hashlib.sha256, + }, + "result": "120fb6cffcf8b32c43e7225256c4f837a86548c9", + }, + { + "args": { + "password": 
"password", + "salt": "salt", + "iterations": 1, + "dklen": 20, + "digest": hashlib.sha512, + }, + "result": "867f70cf1ade02cff3752599a3a53dc4af34c7a6", + }, + { + "args": { + "password": "password", + "salt": "salt", + "iterations": 1000, + "dklen": 0, + "digest": hashlib.sha512, + }, + "result": ( + "afe6c5530785b6cc6b1c6453384731bd5ee432ee" + "549fd42fb6695779ad8a1c5bf59de69c48f774ef" + "c4007d5298f9033c0241d5ab69305e7b64eceeb8d" + "834cfec" + ), + }, + # Check leading zeros are not stripped (#17481) + { + "args": { + "password": b"\xba", + "salt": "salt", + "iterations": 1, + "dklen": 20, + "digest": hashlib.sha1, + }, + "result": "0053d3b91a7f1e54effebd6d68771e8a6e0b2c5b", + }, + ] + + def test_public_vectors(self): + for vector in self.rfc_vectors: + result = pbkdf2(**vector["args"]) + self.assertEqual(result.hex(), vector["result"]) + + def test_regression_vectors(self): + for vector in self.regression_vectors: + result = pbkdf2(**vector["args"]) + self.assertEqual(result.hex(), vector["result"]) + + def test_default_hmac_alg(self): + kwargs = { + "password": b"password", + "salt": b"salt", + "iterations": 1, + "dklen": 20, + } + self.assertEqual( + pbkdf2(**kwargs), + hashlib.pbkdf2_hmac(hash_name=hashlib.sha256().name, **kwargs), + ) diff --git a/testbed/django__django/tests/utils_tests/test_datastructures.py b/testbed/django__django/tests/utils_tests/test_datastructures.py new file mode 100644 index 0000000000000000000000000000000000000000..07229f25ed9940735eacd57b6d8e64bf6a0f96e8 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_datastructures.py @@ -0,0 +1,368 @@ +""" +Tests for stuff in django.utils.datastructures. 
+""" +import collections.abc +import copy +import pickle + +from django.test import SimpleTestCase +from django.utils.datastructures import ( + CaseInsensitiveMapping, + DictWrapper, + ImmutableList, + MultiValueDict, + MultiValueDictKeyError, + OrderedSet, +) + + +class OrderedSetTests(SimpleTestCase): + def test_init_with_iterable(self): + s = OrderedSet([1, 2, 3]) + self.assertEqual(list(s.dict.keys()), [1, 2, 3]) + + def test_remove(self): + s = OrderedSet() + self.assertEqual(len(s), 0) + s.add(1) + s.add(2) + s.remove(2) + self.assertEqual(len(s), 1) + self.assertNotIn(2, s) + + def test_discard(self): + s = OrderedSet() + self.assertEqual(len(s), 0) + s.add(1) + s.discard(2) + self.assertEqual(len(s), 1) + + def test_reversed(self): + s = reversed(OrderedSet([1, 2, 3])) + self.assertIsInstance(s, collections.abc.Iterator) + self.assertEqual(list(s), [3, 2, 1]) + + def test_contains(self): + s = OrderedSet() + self.assertEqual(len(s), 0) + s.add(1) + self.assertIn(1, s) + + def test_bool(self): + # Refs #23664 + s = OrderedSet() + self.assertFalse(s) + s.add(1) + self.assertTrue(s) + + def test_len(self): + s = OrderedSet() + self.assertEqual(len(s), 0) + s.add(1) + s.add(2) + s.add(2) + self.assertEqual(len(s), 2) + + def test_repr(self): + self.assertEqual(repr(OrderedSet()), "OrderedSet()") + self.assertEqual(repr(OrderedSet([2, 3, 2, 1])), "OrderedSet([2, 3, 1])") + + +class MultiValueDictTests(SimpleTestCase): + def test_repr(self): + d = MultiValueDict({"key": "value"}) + self.assertEqual(repr(d), "") + + def test_multivaluedict(self): + d = MultiValueDict( + {"name": ["Adrian", "Simon"], "position": ["Developer"], "empty": []} + ) + self.assertEqual(d["name"], "Simon") + self.assertEqual(d.get("name"), "Simon") + self.assertEqual(d.getlist("name"), ["Adrian", "Simon"]) + self.assertEqual( + list(d.items()), + [("name", "Simon"), ("position", "Developer"), ("empty", [])], + ) + self.assertEqual( + list(d.lists()), + [("name", ["Adrian", "Simon"]), 
("position", ["Developer"]), ("empty", [])], + ) + with self.assertRaisesMessage(MultiValueDictKeyError, "'lastname'"): + d.__getitem__("lastname") + self.assertIsNone(d.get("empty")) + self.assertEqual(d.get("empty", "nonexistent"), "nonexistent") + self.assertIsNone(d.get("lastname")) + self.assertEqual(d.get("lastname", "nonexistent"), "nonexistent") + self.assertEqual(d.getlist("lastname"), []) + self.assertEqual( + d.getlist("doesnotexist", ["Adrian", "Simon"]), ["Adrian", "Simon"] + ) + d.setlist("lastname", ["Holovaty", "Willison"]) + self.assertEqual(d.getlist("lastname"), ["Holovaty", "Willison"]) + self.assertEqual(list(d.values()), ["Simon", "Developer", [], "Willison"]) + + d.setlistdefault("lastname", ["Doe"]) + self.assertEqual(d.getlist("lastname"), ["Holovaty", "Willison"]) + d.setlistdefault("newkey", ["Doe"]) + self.assertEqual(d.getlist("newkey"), ["Doe"]) + + def test_appendlist(self): + d = MultiValueDict() + d.appendlist("name", "Adrian") + d.appendlist("name", "Simon") + self.assertEqual(d.getlist("name"), ["Adrian", "Simon"]) + + def test_copy(self): + for copy_func in [copy.copy, lambda d: d.copy()]: + with self.subTest(copy_func): + d1 = MultiValueDict({"developers": ["Carl", "Fred"]}) + self.assertEqual(d1["developers"], "Fred") + d2 = copy_func(d1) + d2.update({"developers": "Groucho"}) + self.assertEqual(d2["developers"], "Groucho") + self.assertEqual(d1["developers"], "Fred") + + d1 = MultiValueDict({"key": [[]]}) + self.assertEqual(d1["key"], []) + d2 = copy_func(d1) + d2["key"].append("Penguin") + self.assertEqual(d1["key"], ["Penguin"]) + self.assertEqual(d2["key"], ["Penguin"]) + + def test_deepcopy(self): + d1 = MultiValueDict({"a": [[123]]}) + d2 = copy.copy(d1) + d3 = copy.deepcopy(d1) + self.assertIs(d1["a"], d2["a"]) + self.assertIsNot(d1["a"], d3["a"]) + + def test_pickle(self): + x = MultiValueDict({"a": ["1", "2"], "b": ["3"]}) + self.assertEqual(x, pickle.loads(pickle.dumps(x))) + + def test_dict_translation(self): + mvd = 
MultiValueDict( + { + "devs": ["Bob", "Joe"], + "pm": ["Rory"], + } + ) + d = mvd.dict() + self.assertEqual(list(d), list(mvd)) + for key in mvd: + self.assertEqual(d[key], mvd[key]) + + self.assertEqual({}, MultiValueDict().dict()) + + def test_getlist_doesnt_mutate(self): + x = MultiValueDict({"a": ["1", "2"], "b": ["3"]}) + values = x.getlist("a") + values += x.getlist("b") + self.assertEqual(x.getlist("a"), ["1", "2"]) + + def test_internal_getlist_does_mutate(self): + x = MultiValueDict({"a": ["1", "2"], "b": ["3"]}) + values = x._getlist("a") + values += x._getlist("b") + self.assertEqual(x._getlist("a"), ["1", "2", "3"]) + + def test_getlist_default(self): + x = MultiValueDict({"a": [1]}) + MISSING = object() + values = x.getlist("b", default=MISSING) + self.assertIs(values, MISSING) + + def test_getlist_none_empty_values(self): + x = MultiValueDict({"a": None, "b": []}) + self.assertIsNone(x.getlist("a")) + self.assertEqual(x.getlist("b"), []) + + def test_setitem(self): + x = MultiValueDict({"a": [1, 2]}) + x["a"] = 3 + self.assertEqual(list(x.lists()), [("a", [3])]) + + def test_setdefault(self): + x = MultiValueDict({"a": [1, 2]}) + a = x.setdefault("a", 3) + b = x.setdefault("b", 3) + self.assertEqual(a, 2) + self.assertEqual(b, 3) + self.assertEqual(list(x.lists()), [("a", [1, 2]), ("b", [3])]) + + def test_update_too_many_args(self): + x = MultiValueDict({"a": []}) + msg = "update expected at most 1 argument, got 2" + with self.assertRaisesMessage(TypeError, msg): + x.update(1, 2) + + def test_update_no_args(self): + x = MultiValueDict({"a": []}) + x.update() + self.assertEqual(list(x.lists()), [("a", [])]) + + def test_update_dict_arg(self): + x = MultiValueDict({"a": [1], "b": [2], "c": [3]}) + x.update({"a": 4, "b": 5}) + self.assertEqual(list(x.lists()), [("a", [1, 4]), ("b", [2, 5]), ("c", [3])]) + + def test_update_multivaluedict_arg(self): + x = MultiValueDict({"a": [1], "b": [2], "c": [3]}) + x.update(MultiValueDict({"a": [4], "b": [5]})) + 
self.assertEqual(list(x.lists()), [("a", [1, 4]), ("b", [2, 5]), ("c", [3])]) + + def test_update_kwargs(self): + x = MultiValueDict({"a": [1], "b": [2], "c": [3]}) + x.update(a=4, b=5) + self.assertEqual(list(x.lists()), [("a", [1, 4]), ("b", [2, 5]), ("c", [3])]) + + def test_update_with_empty_iterable(self): + for value in ["", b"", (), [], set(), {}]: + d = MultiValueDict() + d.update(value) + self.assertEqual(d, MultiValueDict()) + + def test_update_with_iterable_of_pairs(self): + for value in [(("a", 1),), [("a", 1)], {("a", 1)}]: + d = MultiValueDict() + d.update(value) + self.assertEqual(d, MultiValueDict({"a": [1]})) + + def test_update_raises_correct_exceptions(self): + # MultiValueDict.update() raises equivalent exceptions to + # dict.update(). + # Non-iterable values raise TypeError. + for value in [None, True, False, 123, 123.45]: + with self.subTest(value), self.assertRaises(TypeError): + MultiValueDict().update(value) + # Iterables of objects that cannot be unpacked raise TypeError. + for value in [b"123", b"abc", (1, 2, 3), [1, 2, 3], {1, 2, 3}]: + with self.subTest(value), self.assertRaises(TypeError): + MultiValueDict().update(value) + # Iterables of unpackable objects with incorrect number of items raise + # ValueError. + for value in ["123", "abc", ("a", "b", "c"), ["a", "b", "c"], {"a", "b", "c"}]: + with self.subTest(value), self.assertRaises(ValueError): + MultiValueDict().update(value) + + +class ImmutableListTests(SimpleTestCase): + def test_sort(self): + d = ImmutableList(range(10)) + + # AttributeError: ImmutableList object is immutable. + with self.assertRaisesMessage( + AttributeError, "ImmutableList object is immutable." + ): + d.sort() + + self.assertEqual(repr(d), "(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)") + + def test_custom_warning(self): + d = ImmutableList(range(10), warning="Object is immutable!") + + self.assertEqual(d[1], 1) + + # AttributeError: Object is immutable! 
+ with self.assertRaisesMessage(AttributeError, "Object is immutable!"): + d.__setitem__(1, "test") + + +class DictWrapperTests(SimpleTestCase): + def test_dictwrapper(self): + def f(x): + return "*%s" % x + + d = DictWrapper({"a": "a"}, f, "xx_") + self.assertEqual( + "Normal: %(a)s. Modified: %(xx_a)s" % d, "Normal: a. Modified: *a" + ) + + +class CaseInsensitiveMappingTests(SimpleTestCase): + def setUp(self): + self.dict1 = CaseInsensitiveMapping( + { + "Accept": "application/json", + "content-type": "text/html", + } + ) + + def test_create_with_invalid_values(self): + msg = "dictionary update sequence element #1 has length 4; 2 is required" + with self.assertRaisesMessage(ValueError, msg): + CaseInsensitiveMapping([("Key1", "Val1"), "Key2"]) + + def test_create_with_invalid_key(self): + msg = "Element key 1 invalid, only strings are allowed" + with self.assertRaisesMessage(ValueError, msg): + CaseInsensitiveMapping([(1, "2")]) + + def test_list(self): + self.assertEqual(list(self.dict1), ["Accept", "content-type"]) + + def test_dict(self): + self.assertEqual( + dict(self.dict1), + {"Accept": "application/json", "content-type": "text/html"}, + ) + + def test_repr(self): + dict1 = CaseInsensitiveMapping({"Accept": "application/json"}) + dict2 = CaseInsensitiveMapping({"content-type": "text/html"}) + self.assertEqual(repr(dict1), repr({"Accept": "application/json"})) + self.assertEqual(repr(dict2), repr({"content-type": "text/html"})) + + def test_str(self): + dict1 = CaseInsensitiveMapping({"Accept": "application/json"}) + dict2 = CaseInsensitiveMapping({"content-type": "text/html"}) + self.assertEqual(str(dict1), str({"Accept": "application/json"})) + self.assertEqual(str(dict2), str({"content-type": "text/html"})) + + def test_equal(self): + self.assertEqual( + self.dict1, {"Accept": "application/json", "content-type": "text/html"} + ) + self.assertNotEqual( + self.dict1, {"accept": "application/jso", "Content-Type": "text/html"} + ) + 
self.assertNotEqual(self.dict1, "string") + + def test_items(self): + other = {"Accept": "application/json", "content-type": "text/html"} + self.assertEqual(sorted(self.dict1.items()), sorted(other.items())) + + def test_copy(self): + copy = self.dict1.copy() + self.assertIs(copy, self.dict1) + self.assertEqual(copy, self.dict1) + + def test_getitem(self): + self.assertEqual(self.dict1["Accept"], "application/json") + self.assertEqual(self.dict1["accept"], "application/json") + self.assertEqual(self.dict1["aCCept"], "application/json") + self.assertEqual(self.dict1["content-type"], "text/html") + self.assertEqual(self.dict1["Content-Type"], "text/html") + self.assertEqual(self.dict1["Content-type"], "text/html") + + def test_in(self): + self.assertIn("Accept", self.dict1) + self.assertIn("accept", self.dict1) + self.assertIn("aCCept", self.dict1) + self.assertIn("content-type", self.dict1) + self.assertIn("Content-Type", self.dict1) + + def test_del(self): + self.assertIn("Accept", self.dict1) + msg = "'CaseInsensitiveMapping' object does not support item deletion" + with self.assertRaisesMessage(TypeError, msg): + del self.dict1["Accept"] + self.assertIn("Accept", self.dict1) + + def test_set(self): + self.assertEqual(len(self.dict1), 2) + msg = "'CaseInsensitiveMapping' object does not support item assignment" + with self.assertRaisesMessage(TypeError, msg): + self.dict1["New Key"] = 1 + self.assertEqual(len(self.dict1), 2) diff --git a/testbed/django__django/tests/utils_tests/test_dateformat.py b/testbed/django__django/tests/utils_tests/test_dateformat.py new file mode 100644 index 0000000000000000000000000000000000000000..dce678e1720e2a96e5115108a3c9152ee9ec4ba0 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_dateformat.py @@ -0,0 +1,298 @@ +from datetime import date, datetime, time, timezone, tzinfo + +from django.test import SimpleTestCase, override_settings +from django.test.utils import TZ_SUPPORT, requires_tz_support +from django.utils 
import dateformat, translation +from django.utils.dateformat import format +from django.utils.timezone import get_default_timezone, get_fixed_timezone, make_aware + + +@override_settings(TIME_ZONE="Europe/Copenhagen") +class DateFormatTests(SimpleTestCase): + def setUp(self): + self._orig_lang = translation.get_language() + translation.activate("en-us") + + def tearDown(self): + translation.activate(self._orig_lang) + + def test_date(self): + d = date(2009, 5, 16) + self.assertEqual(date.fromtimestamp(int(format(d, "U"))), d) + + def test_naive_datetime(self): + dt = datetime(2009, 5, 16, 5, 30, 30) + self.assertEqual(datetime.fromtimestamp(int(format(dt, "U"))), dt) + + def test_naive_ambiguous_datetime(self): + # dt is ambiguous in Europe/Copenhagen. + dt = datetime(2015, 10, 25, 2, 30, 0) + + # Try all formatters that involve self.timezone. + self.assertEqual(format(dt, "I"), "") + self.assertEqual(format(dt, "O"), "") + self.assertEqual(format(dt, "T"), "") + self.assertEqual(format(dt, "Z"), "") + + @requires_tz_support + def test_datetime_with_local_tzinfo(self): + ltz = get_default_timezone() + dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz) + self.assertEqual(datetime.fromtimestamp(int(format(dt, "U")), ltz), dt) + self.assertEqual( + datetime.fromtimestamp(int(format(dt, "U"))), dt.replace(tzinfo=None) + ) + + @requires_tz_support + def test_datetime_with_tzinfo(self): + tz = get_fixed_timezone(-510) + ltz = get_default_timezone() + dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz) + self.assertEqual(datetime.fromtimestamp(int(format(dt, "U")), tz), dt) + self.assertEqual(datetime.fromtimestamp(int(format(dt, "U")), ltz), dt) + # astimezone() is safe here because the target timezone doesn't have DST + self.assertEqual( + datetime.fromtimestamp(int(format(dt, "U"))), + dt.astimezone(ltz).replace(tzinfo=None), + ) + self.assertEqual( + datetime.fromtimestamp(int(format(dt, "U")), tz).timetuple(), + dt.astimezone(tz).timetuple(), + ) + 
self.assertEqual( + datetime.fromtimestamp(int(format(dt, "U")), ltz).timetuple(), + dt.astimezone(ltz).timetuple(), + ) + + def test_epoch(self): + udt = datetime(1970, 1, 1, tzinfo=timezone.utc) + self.assertEqual(format(udt, "U"), "0") + + def test_empty_format(self): + my_birthday = datetime(1979, 7, 8, 22, 00) + + self.assertEqual(dateformat.format(my_birthday, ""), "") + + def test_am_pm(self): + morning = time(7, 00) + evening = time(19, 00) + self.assertEqual(dateformat.format(morning, "a"), "a.m.") + self.assertEqual(dateformat.format(evening, "a"), "p.m.") + self.assertEqual(dateformat.format(morning, "A"), "AM") + self.assertEqual(dateformat.format(evening, "A"), "PM") + + def test_microsecond(self): + # Regression test for #18951 + dt = datetime(2009, 5, 16, microsecond=123) + self.assertEqual(dateformat.format(dt, "u"), "000123") + + def test_date_formats(self): + # Specifiers 'I', 'r', and 'U' are covered in test_timezones(). + my_birthday = datetime(1979, 7, 8, 22, 00) + for specifier, expected in [ + ("b", "jul"), + ("d", "08"), + ("D", "Sun"), + ("E", "July"), + ("F", "July"), + ("j", "8"), + ("l", "Sunday"), + ("L", "False"), + ("m", "07"), + ("M", "Jul"), + ("n", "7"), + ("N", "July"), + ("o", "1979"), + ("S", "th"), + ("t", "31"), + ("w", "0"), + ("W", "27"), + ("y", "79"), + ("Y", "1979"), + ("z", "189"), + ]: + with self.subTest(specifier=specifier): + self.assertEqual(dateformat.format(my_birthday, specifier), expected) + + def test_date_formats_c_format(self): + timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456) + self.assertEqual( + dateformat.format(timestamp, "c"), "2008-05-19T11:45:23.123456" + ) + + def test_time_formats(self): + # Specifiers 'I', 'r', and 'U' are covered in test_timezones(). 
+ my_birthday = datetime(1979, 7, 8, 22, 00) + for specifier, expected in [ + ("a", "p.m."), + ("A", "PM"), + ("f", "10"), + ("g", "10"), + ("G", "22"), + ("h", "10"), + ("H", "22"), + ("i", "00"), + ("P", "10 p.m."), + ("s", "00"), + ("u", "000000"), + ]: + with self.subTest(specifier=specifier): + self.assertEqual(dateformat.format(my_birthday, specifier), expected) + + def test_dateformat(self): + my_birthday = datetime(1979, 7, 8, 22, 00) + + self.assertEqual(dateformat.format(my_birthday, r"Y z \C\E\T"), "1979 189 CET") + + self.assertEqual(dateformat.format(my_birthday, r"jS \o\f F"), "8th of July") + + def test_futuredates(self): + the_future = datetime(2100, 10, 25, 0, 00) + self.assertEqual(dateformat.format(the_future, r"Y"), "2100") + + def test_day_of_year_leap(self): + self.assertEqual(dateformat.format(datetime(2000, 12, 31), "z"), "366") + + def test_timezones(self): + my_birthday = datetime(1979, 7, 8, 22, 00) + summertime = datetime(2005, 10, 30, 1, 00) + wintertime = datetime(2005, 10, 30, 4, 00) + noon = time(12, 0, 0) + + # 3h30m to the west of UTC + tz = get_fixed_timezone(-210) + aware_dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz) + + if TZ_SUPPORT: + for specifier, expected in [ + ("e", ""), + ("O", "+0100"), + ("r", "Sun, 08 Jul 1979 22:00:00 +0100"), + ("T", "CET"), + ("U", "300315600"), + ("Z", "3600"), + ]: + with self.subTest(specifier=specifier): + self.assertEqual( + dateformat.format(my_birthday, specifier), expected + ) + + self.assertEqual(dateformat.format(aware_dt, "e"), "-0330") + self.assertEqual( + dateformat.format(aware_dt, "r"), + "Sat, 16 May 2009 05:30:30 -0330", + ) + + self.assertEqual(dateformat.format(summertime, "I"), "1") + self.assertEqual(dateformat.format(summertime, "O"), "+0200") + + self.assertEqual(dateformat.format(wintertime, "I"), "0") + self.assertEqual(dateformat.format(wintertime, "O"), "+0100") + + for specifier in ["e", "O", "T", "Z"]: + with self.subTest(specifier=specifier): + 
self.assertEqual(dateformat.time_format(noon, specifier), "") + + # Ticket #16924 -- We don't need timezone support to test this + self.assertEqual(dateformat.format(aware_dt, "O"), "-0330") + + def test_invalid_time_format_specifiers(self): + my_birthday = date(1984, 8, 7) + + for specifier in ["a", "A", "f", "g", "G", "h", "H", "i", "P", "s", "u"]: + with self.subTest(specifier=specifier): + msg = ( + "The format for date objects may not contain time-related " + f"format specifiers (found {specifier!r})." + ) + with self.assertRaisesMessage(TypeError, msg): + dateformat.format(my_birthday, specifier) + + @requires_tz_support + def test_e_format_with_named_time_zone(self): + dt = datetime(1970, 1, 1, tzinfo=timezone.utc) + self.assertEqual(dateformat.format(dt, "e"), "UTC") + + @requires_tz_support + def test_e_format_with_time_zone_with_unimplemented_tzname(self): + class NoNameTZ(tzinfo): + """Time zone without .tzname() defined.""" + + def utcoffset(self, dt): + return None + + dt = datetime(1970, 1, 1, tzinfo=NoNameTZ()) + self.assertEqual(dateformat.format(dt, "e"), "") + + def test_P_format(self): + for expected, t in [ + ("midnight", time(0)), + ("noon", time(12)), + ("4 a.m.", time(4)), + ("8:30 a.m.", time(8, 30)), + ("4 p.m.", time(16)), + ("8:30 p.m.", time(20, 30)), + ]: + with self.subTest(time=t): + self.assertEqual(dateformat.time_format(t, "P"), expected) + + def test_r_format_with_date(self): + # Assume midnight in default timezone if datetime.date provided. + dt = date(2022, 7, 1) + self.assertEqual( + dateformat.format(dt, "r"), + "Fri, 01 Jul 2022 00:00:00 +0200", + ) + + def test_r_format_with_non_en_locale(self): + # Changing the locale doesn't change the "r" format. 
+ dt = datetime(1979, 7, 8, 22, 00) + with translation.override("fr"): + self.assertEqual( + dateformat.format(dt, "r"), + "Sun, 08 Jul 1979 22:00:00 +0100", + ) + + def test_S_format(self): + for expected, days in [ + ("st", [1, 21, 31]), + ("nd", [2, 22]), + ("rd", [3, 23]), + ("th", (n for n in range(4, 31) if n not in [21, 22, 23])), + ]: + for day in days: + dt = date(1970, 1, day) + with self.subTest(day=day): + self.assertEqual(dateformat.format(dt, "S"), expected) + + def test_y_format_year_before_1000(self): + tests = [ + (476, "76"), + (42, "42"), + (4, "04"), + ] + for year, expected_date in tests: + with self.subTest(year=year): + self.assertEqual( + dateformat.format(datetime(year, 9, 8, 5, 0), "y"), + expected_date, + ) + + def test_Y_format_year_before_1000(self): + self.assertEqual(dateformat.format(datetime(1, 1, 1), "Y"), "0001") + self.assertEqual(dateformat.format(datetime(999, 1, 1), "Y"), "0999") + + def test_twelve_hour_format(self): + tests = [ + (0, "12", "12"), + (1, "1", "01"), + (11, "11", "11"), + (12, "12", "12"), + (13, "1", "01"), + (23, "11", "11"), + ] + for hour, g_expected, h_expected in tests: + dt = datetime(2000, 1, 1, hour) + with self.subTest(hour=hour): + self.assertEqual(dateformat.format(dt, "g"), g_expected) + self.assertEqual(dateformat.format(dt, "h"), h_expected) diff --git a/testbed/django__django/tests/utils_tests/test_dateparse.py b/testbed/django__django/tests/utils_tests/test_dateparse.py new file mode 100644 index 0000000000000000000000000000000000000000..17d532a09f6d1b2734bff46b4d884f0bc1e1bf4a --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_dateparse.py @@ -0,0 +1,225 @@ +import unittest +from datetime import date, datetime, time, timedelta + +from django.utils.dateparse import ( + parse_date, + parse_datetime, + parse_duration, + parse_time, +) +from django.utils.timezone import get_fixed_timezone +from django.utils.version import PY311 + + +class DateParseTests(unittest.TestCase): + def 
test_parse_date(self): + # Valid inputs + self.assertEqual(parse_date("2012-04-23"), date(2012, 4, 23)) + self.assertEqual(parse_date("2012-4-9"), date(2012, 4, 9)) + if PY311: + self.assertEqual(parse_date("20120423"), date(2012, 4, 23)) + # Invalid inputs + self.assertIsNone(parse_date("2012423")) + with self.assertRaises(ValueError): + parse_date("2012-04-56") + + def test_parse_time(self): + # Valid inputs + self.assertEqual(parse_time("09:15:00"), time(9, 15)) + if PY311: + self.assertEqual(parse_time("091500"), time(9, 15)) + self.assertEqual(parse_time("10:10"), time(10, 10)) + self.assertEqual(parse_time("10:20:30.400"), time(10, 20, 30, 400000)) + self.assertEqual(parse_time("10:20:30,400"), time(10, 20, 30, 400000)) + self.assertEqual(parse_time("4:8:16"), time(4, 8, 16)) + # Time zone offset is ignored. + self.assertEqual(parse_time("00:05:23+04:00"), time(0, 5, 23)) + # Invalid inputs + self.assertIsNone(parse_time("00:05:")) + self.assertIsNone(parse_time("00:05:23,")) + self.assertIsNone(parse_time("00:05:23+")) + self.assertIsNone(parse_time("00:05:23+25:00")) + self.assertIsNone(parse_time("4:18:101")) + self.assertIsNone(parse_time("91500")) + with self.assertRaises(ValueError): + parse_time("09:15:90") + + def test_parse_datetime(self): + valid_inputs = ( + ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)), + ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)), + ( + "2012-04-23T09:15:00Z", + datetime(2012, 4, 23, 9, 15, 0, 0, get_fixed_timezone(0)), + ), + ( + "2012-4-9 4:8:16-0320", + datetime(2012, 4, 9, 4, 8, 16, 0, get_fixed_timezone(-200)), + ), + ( + "2012-04-23T10:20:30.400+02:30", + datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150)), + ), + ( + "2012-04-23T10:20:30.400+02", + datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(120)), + ), + ( + "2012-04-23T10:20:30.400-02", + datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(-120)), + ), + ( + "2012-04-23T10:20:30,400-02", + datetime(2012, 4, 23, 
10, 20, 30, 400000, get_fixed_timezone(-120)), + ), + ( + "2012-04-23T10:20:30.400 +0230", + datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150)), + ), + ( + "2012-04-23T10:20:30,400 +00", + datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(0)), + ), + ( + "2012-04-23T10:20:30 -02", + datetime(2012, 4, 23, 10, 20, 30, 0, get_fixed_timezone(-120)), + ), + ) + for source, expected in valid_inputs: + with self.subTest(source=source): + self.assertEqual(parse_datetime(source), expected) + + # Invalid inputs + self.assertIsNone(parse_datetime("20120423091500")) + with self.assertRaises(ValueError): + parse_datetime("2012-04-56T09:15:90") + + +class DurationParseTests(unittest.TestCase): + def test_parse_python_format(self): + timedeltas = [ + timedelta( + days=4, minutes=15, seconds=30, milliseconds=100 + ), # fractions of seconds + timedelta(hours=10, minutes=15, seconds=30), # hours, minutes, seconds + timedelta(days=4, minutes=15, seconds=30), # multiple days + timedelta(days=1, minutes=00, seconds=00), # single day + timedelta(days=-4, minutes=15, seconds=30), # negative durations + timedelta(minutes=15, seconds=30), # minute & seconds + timedelta(seconds=30), # seconds + ] + for delta in timedeltas: + with self.subTest(delta=delta): + self.assertEqual(parse_duration(format(delta)), delta) + + def test_parse_postgresql_format(self): + test_values = ( + ("1 day", timedelta(1)), + ("-1 day", timedelta(-1)), + ("1 day 0:00:01", timedelta(days=1, seconds=1)), + ("1 day -0:00:01", timedelta(days=1, seconds=-1)), + ("-1 day -0:00:01", timedelta(days=-1, seconds=-1)), + ("-1 day +0:00:01", timedelta(days=-1, seconds=1)), + ( + "4 days 0:15:30.1", + timedelta(days=4, minutes=15, seconds=30, milliseconds=100), + ), + ( + "4 days 0:15:30.0001", + timedelta(days=4, minutes=15, seconds=30, microseconds=100), + ), + ("-4 days -15:00:30", timedelta(days=-4, hours=-15, seconds=-30)), + ) + for source, expected in test_values: + with 
self.subTest(source=source): + self.assertEqual(parse_duration(source), expected) + + def test_seconds(self): + self.assertEqual(parse_duration("30"), timedelta(seconds=30)) + + def test_minutes_seconds(self): + self.assertEqual(parse_duration("15:30"), timedelta(minutes=15, seconds=30)) + self.assertEqual(parse_duration("5:30"), timedelta(minutes=5, seconds=30)) + + def test_hours_minutes_seconds(self): + self.assertEqual( + parse_duration("10:15:30"), timedelta(hours=10, minutes=15, seconds=30) + ) + self.assertEqual( + parse_duration("1:15:30"), timedelta(hours=1, minutes=15, seconds=30) + ) + self.assertEqual( + parse_duration("100:200:300"), + timedelta(hours=100, minutes=200, seconds=300), + ) + + def test_days(self): + self.assertEqual( + parse_duration("4 15:30"), timedelta(days=4, minutes=15, seconds=30) + ) + self.assertEqual( + parse_duration("4 10:15:30"), + timedelta(days=4, hours=10, minutes=15, seconds=30), + ) + + def test_fractions_of_seconds(self): + test_values = ( + ("15:30.1", timedelta(minutes=15, seconds=30, milliseconds=100)), + ("15:30.01", timedelta(minutes=15, seconds=30, milliseconds=10)), + ("15:30.001", timedelta(minutes=15, seconds=30, milliseconds=1)), + ("15:30.0001", timedelta(minutes=15, seconds=30, microseconds=100)), + ("15:30.00001", timedelta(minutes=15, seconds=30, microseconds=10)), + ("15:30.000001", timedelta(minutes=15, seconds=30, microseconds=1)), + ("15:30,000001", timedelta(minutes=15, seconds=30, microseconds=1)), + ) + for source, expected in test_values: + with self.subTest(source=source): + self.assertEqual(parse_duration(source), expected) + + def test_negative(self): + test_values = ( + ("-4 15:30", timedelta(days=-4, minutes=15, seconds=30)), + ("-172800", timedelta(days=-2)), + ("-15:30", timedelta(minutes=-15, seconds=-30)), + ("-1:15:30", timedelta(hours=-1, minutes=-15, seconds=-30)), + ("-30.1", timedelta(seconds=-30, milliseconds=-100)), + ("-30,1", timedelta(seconds=-30, milliseconds=-100)), + 
("-00:01:01", timedelta(minutes=-1, seconds=-1)), + ("-01:01", timedelta(seconds=-61)), + ("-01:-01", None), + ) + for source, expected in test_values: + with self.subTest(source=source): + self.assertEqual(parse_duration(source), expected) + + def test_iso_8601(self): + test_values = ( + ("P4Y", None), + ("P4M", None), + ("P4W", None), + ("P4D", timedelta(days=4)), + ("-P1D", timedelta(days=-1)), + ("P0.5D", timedelta(hours=12)), + ("P0,5D", timedelta(hours=12)), + ("-P0.5D", timedelta(hours=-12)), + ("-P0,5D", timedelta(hours=-12)), + ("PT5H", timedelta(hours=5)), + ("-PT5H", timedelta(hours=-5)), + ("PT5M", timedelta(minutes=5)), + ("-PT5M", timedelta(minutes=-5)), + ("PT5S", timedelta(seconds=5)), + ("-PT5S", timedelta(seconds=-5)), + ("PT0.000005S", timedelta(microseconds=5)), + ("PT0,000005S", timedelta(microseconds=5)), + ("-PT0.000005S", timedelta(microseconds=-5)), + ("-PT0,000005S", timedelta(microseconds=-5)), + ("-P4DT1H", timedelta(days=-4, hours=-1)), + # Invalid separators for decimal fractions. 
+ ("P3(3D", None), + ("PT3)3H", None), + ("PT3|3M", None), + ("PT3/3S", None), + ) + for source, expected in test_values: + with self.subTest(source=source): + self.assertEqual(parse_duration(source), expected) diff --git a/testbed/django__django/tests/utils_tests/test_deconstruct.py b/testbed/django__django/tests/utils_tests/test_deconstruct.py new file mode 100644 index 0000000000000000000000000000000000000000..e90ff3267690e492a1476028cb2556573e458f04 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_deconstruct.py @@ -0,0 +1,96 @@ +from django.test import SimpleTestCase +from django.utils.deconstruct import deconstructible +from django.utils.version import get_docs_version + + +@deconstructible() +class DeconstructibleClass: + pass + + +class DeconstructibleChildClass(DeconstructibleClass): + pass + + +@deconstructible( + path="utils_tests.deconstructible_classes.DeconstructibleWithPathClass" +) +class DeconstructibleWithPathClass: + pass + + +class DeconstructibleWithPathChildClass(DeconstructibleWithPathClass): + pass + + +@deconstructible( + path="utils_tests.deconstructible_classes.DeconstructibleInvalidPathClass", +) +class DeconstructibleInvalidPathClass: + pass + + +class DeconstructibleInvalidPathChildClass(DeconstructibleInvalidPathClass): + pass + + +class DeconstructibleTests(SimpleTestCase): + def test_deconstruct(self): + obj = DeconstructibleClass("arg", key="value") + path, args, kwargs = obj.deconstruct() + self.assertEqual(path, "utils_tests.test_deconstruct.DeconstructibleClass") + self.assertEqual(args, ("arg",)) + self.assertEqual(kwargs, {"key": "value"}) + + def test_deconstruct_with_path(self): + obj = DeconstructibleWithPathClass("arg", key="value") + path, args, kwargs = obj.deconstruct() + self.assertEqual( + path, + "utils_tests.deconstructible_classes.DeconstructibleWithPathClass", + ) + self.assertEqual(args, ("arg",)) + self.assertEqual(kwargs, {"key": "value"}) + + def test_deconstruct_child(self): + obj = 
DeconstructibleChildClass("arg", key="value") + path, args, kwargs = obj.deconstruct() + self.assertEqual(path, "utils_tests.test_deconstruct.DeconstructibleChildClass") + self.assertEqual(args, ("arg",)) + self.assertEqual(kwargs, {"key": "value"}) + + def test_deconstruct_child_with_path(self): + obj = DeconstructibleWithPathChildClass("arg", key="value") + path, args, kwargs = obj.deconstruct() + self.assertEqual( + path, + "utils_tests.test_deconstruct.DeconstructibleWithPathChildClass", + ) + self.assertEqual(args, ("arg",)) + self.assertEqual(kwargs, {"key": "value"}) + + def test_invalid_path(self): + obj = DeconstructibleInvalidPathClass() + docs_version = get_docs_version() + msg = ( + f"Could not find object DeconstructibleInvalidPathClass in " + f"utils_tests.deconstructible_classes.\n" + f"Please note that you cannot serialize things like inner " + f"classes. Please move the object into the main module body to " + f"use migrations.\n" + f"For more information, see " + f"https://docs.djangoproject.com/en/{docs_version}/topics/" + f"migrations/#serializing-values" + ) + with self.assertRaisesMessage(ValueError, msg): + obj.deconstruct() + + def test_parent_invalid_path(self): + obj = DeconstructibleInvalidPathChildClass("arg", key="value") + path, args, kwargs = obj.deconstruct() + self.assertEqual( + path, + "utils_tests.test_deconstruct.DeconstructibleInvalidPathChildClass", + ) + self.assertEqual(args, ("arg",)) + self.assertEqual(kwargs, {"key": "value"}) diff --git a/testbed/django__django/tests/utils_tests/test_decorators.py b/testbed/django__django/tests/utils_tests/test_decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..8c0244e8198a1efbfdbc63a6cb58e4dd4a5da161 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_decorators.py @@ -0,0 +1,119 @@ +from django.http import HttpResponse +from django.template import engines +from django.template.response import TemplateResponse +from django.test import 
RequestFactory, SimpleTestCase +from django.utils.decorators import decorator_from_middleware + + +class ProcessViewMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def process_view(self, request, view_func, view_args, view_kwargs): + pass + + +process_view_dec = decorator_from_middleware(ProcessViewMiddleware) + + +@process_view_dec +def process_view(request): + return HttpResponse() + + +class ClassProcessView: + def __call__(self, request): + return HttpResponse() + + +class_process_view = process_view_dec(ClassProcessView()) + + +class FullMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def process_request(self, request): + request.process_request_reached = True + + def process_view(self, request, view_func, view_args, view_kwargs): + request.process_view_reached = True + + def process_template_response(self, request, response): + request.process_template_response_reached = True + return response + + def process_response(self, request, response): + # This should never receive unrendered content. + request.process_response_content = response.content + request.process_response_reached = True + return response + + +full_dec = decorator_from_middleware(FullMiddleware) + + +class DecoratorFromMiddlewareTests(SimpleTestCase): + """ + Tests for view decorators created using + ``django.utils.decorators.decorator_from_middleware``. + """ + + rf = RequestFactory() + + def test_process_view_middleware(self): + """ + Test a middleware that implements process_view. + """ + process_view(self.rf.get("/")) + + def test_callable_process_view_middleware(self): + """ + Test a middleware that implements process_view, operating on a callable class. 
+ """ + class_process_view(self.rf.get("/")) + + def test_full_dec_normal(self): + """ + All methods of middleware are called for normal HttpResponses + """ + + @full_dec + def normal_view(request): + template = engines["django"].from_string("Hello world") + return HttpResponse(template.render()) + + request = self.rf.get("/") + normal_view(request) + self.assertTrue(getattr(request, "process_request_reached", False)) + self.assertTrue(getattr(request, "process_view_reached", False)) + # process_template_response must not be called for HttpResponse + self.assertFalse(getattr(request, "process_template_response_reached", False)) + self.assertTrue(getattr(request, "process_response_reached", False)) + + def test_full_dec_templateresponse(self): + """ + All methods of middleware are called for TemplateResponses in + the right sequence. + """ + + @full_dec + def template_response_view(request): + template = engines["django"].from_string("Hello world") + return TemplateResponse(request, template) + + request = self.rf.get("/") + response = template_response_view(request) + self.assertTrue(getattr(request, "process_request_reached", False)) + self.assertTrue(getattr(request, "process_view_reached", False)) + self.assertTrue(getattr(request, "process_template_response_reached", False)) + # response must not be rendered yet. + self.assertFalse(response._is_rendered) + # process_response must not be called until after response is rendered, + # otherwise some decorators like csrf_protect and gzip_page will not + # work correctly. 
See #16004 + self.assertFalse(getattr(request, "process_response_reached", False)) + response.render() + self.assertTrue(getattr(request, "process_response_reached", False)) + # process_response saw the rendered content + self.assertEqual(request.process_response_content, b"Hello world") diff --git a/testbed/django__django/tests/utils_tests/test_duration.py b/testbed/django__django/tests/utils_tests/test_duration.py new file mode 100644 index 0000000000000000000000000000000000000000..fc6cbc33629230b8447ab1c8df5af7680aed7f60 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_duration.py @@ -0,0 +1,100 @@ +import datetime +import unittest + +from django.utils.dateparse import parse_duration +from django.utils.duration import ( + duration_iso_string, + duration_microseconds, + duration_string, +) + + +class TestDurationString(unittest.TestCase): + def test_simple(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5) + self.assertEqual(duration_string(duration), "01:03:05") + + def test_days(self): + duration = datetime.timedelta(days=1, hours=1, minutes=3, seconds=5) + self.assertEqual(duration_string(duration), "1 01:03:05") + + def test_microseconds(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5, microseconds=12345) + self.assertEqual(duration_string(duration), "01:03:05.012345") + + def test_negative(self): + duration = datetime.timedelta(days=-1, hours=1, minutes=3, seconds=5) + self.assertEqual(duration_string(duration), "-1 01:03:05") + + +class TestParseDurationRoundtrip(unittest.TestCase): + def test_simple(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5) + self.assertEqual(parse_duration(duration_string(duration)), duration) + + def test_days(self): + duration = datetime.timedelta(days=1, hours=1, minutes=3, seconds=5) + self.assertEqual(parse_duration(duration_string(duration)), duration) + + def test_microseconds(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5, 
microseconds=12345) + self.assertEqual(parse_duration(duration_string(duration)), duration) + + def test_negative(self): + duration = datetime.timedelta(days=-1, hours=1, minutes=3, seconds=5) + self.assertEqual(parse_duration(duration_string(duration)), duration) + + +class TestISODurationString(unittest.TestCase): + def test_simple(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5) + self.assertEqual(duration_iso_string(duration), "P0DT01H03M05S") + + def test_days(self): + duration = datetime.timedelta(days=1, hours=1, minutes=3, seconds=5) + self.assertEqual(duration_iso_string(duration), "P1DT01H03M05S") + + def test_microseconds(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5, microseconds=12345) + self.assertEqual(duration_iso_string(duration), "P0DT01H03M05.012345S") + + def test_negative(self): + duration = -1 * datetime.timedelta(days=1, hours=1, minutes=3, seconds=5) + self.assertEqual(duration_iso_string(duration), "-P1DT01H03M05S") + + +class TestParseISODurationRoundtrip(unittest.TestCase): + def test_simple(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5) + self.assertEqual(parse_duration(duration_iso_string(duration)), duration) + + def test_days(self): + duration = datetime.timedelta(days=1, hours=1, minutes=3, seconds=5) + self.assertEqual(parse_duration(duration_iso_string(duration)), duration) + + def test_microseconds(self): + duration = datetime.timedelta(hours=1, minutes=3, seconds=5, microseconds=12345) + self.assertEqual(parse_duration(duration_iso_string(duration)), duration) + + def test_negative(self): + duration = datetime.timedelta(days=-1, hours=1, minutes=3, seconds=5) + self.assertEqual( + parse_duration(duration_iso_string(duration)).total_seconds(), + duration.total_seconds(), + ) + + +class TestDurationMicroseconds(unittest.TestCase): + def test(self): + deltas = [ + datetime.timedelta.max, + datetime.timedelta.min, + datetime.timedelta.resolution, + 
-datetime.timedelta.resolution, + datetime.timedelta(microseconds=8999999999999999), + ] + for delta in deltas: + with self.subTest(delta=delta): + self.assertEqual( + datetime.timedelta(microseconds=duration_microseconds(delta)), delta + ) diff --git a/testbed/django__django/tests/utils_tests/test_encoding.py b/testbed/django__django/tests/utils_tests/test_encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..6dea260b841baef24c80a3714abe83383c367fa0 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_encoding.py @@ -0,0 +1,220 @@ +import datetime +import sys +import unittest +from pathlib import Path +from unittest import mock +from urllib.parse import quote_plus + +from django.test import SimpleTestCase +from django.utils.encoding import ( + DjangoUnicodeDecodeError, + escape_uri_path, + filepath_to_uri, + force_bytes, + force_str, + get_system_encoding, + iri_to_uri, + repercent_broken_unicode, + smart_bytes, + smart_str, + uri_to_iri, +) +from django.utils.functional import SimpleLazyObject +from django.utils.translation import gettext_lazy + + +class TestEncodingUtils(SimpleTestCase): + def test_force_str_exception(self): + """ + Broken __str__ actually raises an error. + """ + + class MyString: + def __str__(self): + return b"\xc3\xb6\xc3\xa4\xc3\xbc" + + # str(s) raises a TypeError if the result is not a text type. + with self.assertRaises(TypeError): + force_str(MyString()) + + def test_force_str_lazy(self): + s = SimpleLazyObject(lambda: "x") + self.assertIs(type(force_str(s)), str) + + def test_force_str_DjangoUnicodeDecodeError(self): + msg = ( + "'utf-8' codec can't decode byte 0xff in position 0: invalid " + "start byte. You passed in b'\\xff' ()" + ) + with self.assertRaisesMessage(DjangoUnicodeDecodeError, msg): + force_str(b"\xff") + + def test_force_bytes_exception(self): + """ + force_bytes knows how to convert to bytes an exception + containing non-ASCII characters in its args. 
+ """ + error_msg = "This is an exception, voilà" + exc = ValueError(error_msg) + self.assertEqual(force_bytes(exc), error_msg.encode()) + self.assertEqual( + force_bytes(exc, encoding="ascii", errors="ignore"), + b"This is an exception, voil", + ) + + def test_force_bytes_strings_only(self): + today = datetime.date.today() + self.assertEqual(force_bytes(today, strings_only=True), today) + + def test_force_bytes_encoding(self): + error_msg = "This is an exception, voilà".encode() + result = force_bytes(error_msg, encoding="ascii", errors="ignore") + self.assertEqual(result, b"This is an exception, voil") + + def test_force_bytes_memory_view(self): + data = b"abc" + result = force_bytes(memoryview(data)) + # Type check is needed because memoryview(bytes) == bytes. + self.assertIs(type(result), bytes) + self.assertEqual(result, data) + + def test_smart_bytes(self): + class Test: + def __str__(self): + return "ŠĐĆŽćžšđ" + + lazy_func = gettext_lazy("x") + self.assertIs(smart_bytes(lazy_func), lazy_func) + self.assertEqual( + smart_bytes(Test()), + b"\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91", + ) + self.assertEqual(smart_bytes(1), b"1") + self.assertEqual(smart_bytes("foo"), b"foo") + + def test_smart_str(self): + class Test: + def __str__(self): + return "ŠĐĆŽćžšđ" + + lazy_func = gettext_lazy("x") + self.assertIs(smart_str(lazy_func), lazy_func) + self.assertEqual( + smart_str(Test()), "\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" + ) + self.assertEqual(smart_str(1), "1") + self.assertEqual(smart_str("foo"), "foo") + + def test_get_default_encoding(self): + with mock.patch("locale.getlocale", side_effect=Exception): + self.assertEqual(get_system_encoding(), "ascii") + + def test_repercent_broken_unicode_recursion_error(self): + # Prepare a string long enough to force a recursion error if the tested + # function uses recursion. 
+ data = b"\xfc" * sys.getrecursionlimit() + try: + self.assertEqual( + repercent_broken_unicode(data), b"%FC" * sys.getrecursionlimit() + ) + except RecursionError: + self.fail("Unexpected RecursionError raised.") + + +class TestRFC3987IEncodingUtils(unittest.TestCase): + def test_filepath_to_uri(self): + self.assertIsNone(filepath_to_uri(None)) + self.assertEqual( + filepath_to_uri("upload\\чубака.mp4"), + "upload/%D1%87%D1%83%D0%B1%D0%B0%D0%BA%D0%B0.mp4", + ) + self.assertEqual(filepath_to_uri(Path("upload/test.png")), "upload/test.png") + self.assertEqual(filepath_to_uri(Path("upload\\test.png")), "upload/test.png") + + def test_iri_to_uri(self): + cases = [ + # Valid UTF-8 sequences are encoded. + ("red%09rosé#red", "red%09ros%C3%A9#red"), + ("/blog/for/Jürgen Münster/", "/blog/for/J%C3%BCrgen%20M%C3%BCnster/"), + ( + "locations/%s" % quote_plus("Paris & Orléans"), + "locations/Paris+%26+Orl%C3%A9ans", + ), + # Reserved chars remain unescaped. + ("%&", "%&"), + ("red&♥ros%#red", "red&%E2%99%A5ros%#red"), + (gettext_lazy("red&♥ros%#red"), "red&%E2%99%A5ros%#red"), + ] + + for iri, uri in cases: + with self.subTest(iri): + self.assertEqual(iri_to_uri(iri), uri) + + # Test idempotency. + self.assertEqual(iri_to_uri(iri_to_uri(iri)), uri) + + def test_uri_to_iri(self): + cases = [ + (None, None), + # Valid UTF-8 sequences are decoded. + ("/%e2%89%Ab%E2%99%a5%E2%89%aB/", "/≫♥≫/"), + ("/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93", "/♥♥/?utf8=✓"), + ("/%41%5a%6B/", "/AZk/"), + # Reserved and non-URL valid ASCII chars are not decoded. + ("/%25%20%02%41%7b/", "/%25%20%02A%7b/"), + # Broken UTF-8 sequences remain escaped. 
+ ("/%AAd%AAj%AAa%AAn%AAg%AAo%AA/", "/%AAd%AAj%AAa%AAn%AAg%AAo%AA/"), + ("/%E2%99%A5%E2%E2%99%A5/", "/♥%E2♥/"), + ("/%E2%99%A5%E2%99%E2%99%A5/", "/♥%E2%99♥/"), + ("/%E2%E2%99%A5%E2%99%A5%99/", "/%E2♥♥%99/"), + ( + "/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93", + "/♥♥/?utf8=%9C%93✓%9C%93", + ), + ] + + for uri, iri in cases: + with self.subTest(uri): + self.assertEqual(uri_to_iri(uri), iri) + + # Test idempotency. + self.assertEqual(uri_to_iri(uri_to_iri(uri)), iri) + + def test_complementarity(self): + cases = [ + ( + "/blog/for/J%C3%BCrgen%20M%C3%BCnster/", + "/blog/for/J\xfcrgen%20M\xfcnster/", + ), + ("%&", "%&"), + ("red&%E2%99%A5ros%#red", "red&♥ros%#red"), + ("/%E2%99%A5%E2%99%A5/", "/♥♥/"), + ("/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93", "/♥♥/?utf8=✓"), + ("/%25%20%02%7b/", "/%25%20%02%7b/"), + ("/%AAd%AAj%AAa%AAn%AAg%AAo%AA/", "/%AAd%AAj%AAa%AAn%AAg%AAo%AA/"), + ("/%E2%99%A5%E2%E2%99%A5/", "/♥%E2♥/"), + ("/%E2%99%A5%E2%99%E2%99%A5/", "/♥%E2%99♥/"), + ("/%E2%E2%99%A5%E2%99%A5%99/", "/%E2♥♥%99/"), + ( + "/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93", + "/♥♥/?utf8=%9C%93✓%9C%93", + ), + ] + + for uri, iri in cases: + with self.subTest(uri): + self.assertEqual(iri_to_uri(uri_to_iri(uri)), uri) + self.assertEqual(uri_to_iri(iri_to_uri(iri)), iri) + + def test_escape_uri_path(self): + cases = [ + ( + "/;some/=awful/?path/:with/@lots/&of/+awful/chars", + "/%3Bsome/%3Dawful/%3Fpath/:with/@lots/&of/+awful/chars", + ), + ("/foo#bar", "/foo%23bar"), + ("/foo?bar", "/foo%3Fbar"), + ] + for uri, expected in cases: + with self.subTest(uri): + self.assertEqual(escape_uri_path(uri), expected) diff --git a/testbed/django__django/tests/utils_tests/test_feedgenerator.py b/testbed/django__django/tests/utils_tests/test_feedgenerator.py new file mode 100644 index 0000000000000000000000000000000000000000..ee15b6e928832a1874e721eb5b2506480d2f2e2a --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_feedgenerator.py @@ -0,0 +1,150 @@ +import datetime + +from 
django.test import SimpleTestCase +from django.utils import feedgenerator +from django.utils.timezone import get_fixed_timezone + + +class FeedgeneratorTests(SimpleTestCase): + """ + Tests for the low-level syndication feed framework. + """ + + def test_get_tag_uri(self): + """ + get_tag_uri() correctly generates TagURIs. + """ + self.assertEqual( + feedgenerator.get_tag_uri( + "http://example.org/foo/bar#headline", datetime.date(2004, 10, 25) + ), + "tag:example.org,2004-10-25:/foo/bar/headline", + ) + + def test_get_tag_uri_with_port(self): + """ + get_tag_uri() correctly generates TagURIs from URLs with port numbers. + """ + self.assertEqual( + feedgenerator.get_tag_uri( + "http://www.example.org:8000/2008/11/14/django#headline", + datetime.datetime(2008, 11, 14, 13, 37, 0), + ), + "tag:www.example.org,2008-11-14:/2008/11/14/django/headline", + ) + + def test_rfc2822_date(self): + """ + rfc2822_date() correctly formats datetime objects. + """ + self.assertEqual( + feedgenerator.rfc2822_date(datetime.datetime(2008, 11, 14, 13, 37, 0)), + "Fri, 14 Nov 2008 13:37:00 -0000", + ) + + def test_rfc2822_date_with_timezone(self): + """ + rfc2822_date() correctly formats datetime objects with tzinfo. + """ + self.assertEqual( + feedgenerator.rfc2822_date( + datetime.datetime( + 2008, 11, 14, 13, 37, 0, tzinfo=get_fixed_timezone(60) + ) + ), + "Fri, 14 Nov 2008 13:37:00 +0100", + ) + + def test_rfc2822_date_without_time(self): + """ + rfc2822_date() correctly formats date objects. + """ + self.assertEqual( + feedgenerator.rfc2822_date(datetime.date(2008, 11, 14)), + "Fri, 14 Nov 2008 00:00:00 -0000", + ) + + def test_rfc3339_date(self): + """ + rfc3339_date() correctly formats datetime objects. + """ + self.assertEqual( + feedgenerator.rfc3339_date(datetime.datetime(2008, 11, 14, 13, 37, 0)), + "2008-11-14T13:37:00Z", + ) + + def test_rfc3339_date_with_timezone(self): + """ + rfc3339_date() correctly formats datetime objects with tzinfo. 
+ """ + self.assertEqual( + feedgenerator.rfc3339_date( + datetime.datetime( + 2008, 11, 14, 13, 37, 0, tzinfo=get_fixed_timezone(120) + ) + ), + "2008-11-14T13:37:00+02:00", + ) + + def test_rfc3339_date_without_time(self): + """ + rfc3339_date() correctly formats date objects. + """ + self.assertEqual( + feedgenerator.rfc3339_date(datetime.date(2008, 11, 14)), + "2008-11-14T00:00:00Z", + ) + + def test_atom1_mime_type(self): + """ + Atom MIME type has UTF8 Charset parameter set + """ + atom_feed = feedgenerator.Atom1Feed("title", "link", "description") + self.assertEqual(atom_feed.content_type, "application/atom+xml; charset=utf-8") + + def test_rss_mime_type(self): + """ + RSS MIME type has UTF8 Charset parameter set + """ + rss_feed = feedgenerator.Rss201rev2Feed("title", "link", "description") + self.assertEqual(rss_feed.content_type, "application/rss+xml; charset=utf-8") + + # Two regression tests for #14202 + + def test_feed_without_feed_url_gets_rendered_without_atom_link(self): + feed = feedgenerator.Rss201rev2Feed("title", "/link/", "descr") + self.assertIsNone(feed.feed["feed_url"]) + feed_content = feed.writeString("utf-8") + self.assertNotIn("", ">"), + ('"', """), + ("'", "'"), + ) + # Substitution patterns for testing the above items. + patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb") + for value, output in items: + with self.subTest(value=value, output=output): + for pattern in patterns: + with self.subTest(value=value, output=output, pattern=pattern): + self.check_output(escape, pattern % value, pattern % output) + self.check_output( + escape, lazystr(pattern % value), pattern % output + ) + # Check repeated values. + self.check_output(escape, value * 2, output * 2) + # Verify it doesn't double replace &. 
+ self.check_output(escape, "<&", "<&") + + def test_format_html(self): + self.assertEqual( + format_html( + "{} {} {third} {fourth}", + "< Dangerous >", + mark_safe("safe"), + third="< dangerous again", + fourth=mark_safe("safe again"), + ), + "< Dangerous > safe < dangerous again safe again", + ) + + def test_format_html_no_params(self): + msg = "Calling format_html() without passing args or kwargs is deprecated." + # RemovedInDjango60Warning: when the deprecation ends, replace with: + # msg = "args or kwargs must be provided." + # with self.assertRaisesMessage(ValueError, msg): + with self.assertWarnsMessage(RemovedInDjango60Warning, msg): + name = "Adam" + self.assertEqual(format_html(f"{name}"), "Adam") + + def test_linebreaks(self): + items = ( + ("para1\n\npara2\r\rpara3", "

    para1

    \n\n

    para2

    \n\n

    para3

    "), + ( + "para1\nsub1\rsub2\n\npara2", + "

    para1
    sub1
    sub2

    \n\n

    para2

    ", + ), + ( + "para1\r\n\r\npara2\rsub1\r\rpara4", + "

    para1

    \n\n

    para2
    sub1

    \n\n

    para4

    ", + ), + ("para1\tmore\n\npara2", "

    para1\tmore

    \n\n

    para2

    "), + ) + for value, output in items: + with self.subTest(value=value, output=output): + self.check_output(linebreaks, value, output) + self.check_output(linebreaks, lazystr(value), output) + + def test_strip_tags(self): + items = ( + ( + "

    See: 'é is an apostrophe followed by e acute

    ", + "See: 'é is an apostrophe followed by e acute", + ), + ( + "

    See: 'é is an apostrophe followed by e acute

    ", + "See: 'é is an apostrophe followed by e acute", + ), + ("a", "a"), + ("a", "a"), + ("e", "e"), + ("hi, b2!", "b7>b2!"), + ("b", "b"), + ("a

    ')\">b

    c", "abc"), + ("a

    b

    c", "abc"), + ("de

    f", "def"), + ('foobar', "foobar"), + # caused infinite loop on Pythons not patched with + # https://bugs.python.org/issue20288 + ("&gotcha&#;<>", "&gotcha&#;<>"), + ("ript>test</script>", "ript>test"), + ("&h", "alert()h"), + (">br>br>br>X", "XX"), + ) + for value, output in items: + with self.subTest(value=value, output=output): + self.check_output(strip_tags, value, output) + self.check_output(strip_tags, lazystr(value), output) + + def test_strip_tags_files(self): + # Test with more lengthy content (also catching performance regressions) + for filename in ("strip_tags1.html", "strip_tags2.txt"): + with self.subTest(filename=filename): + path = os.path.join(os.path.dirname(__file__), "files", filename) + with open(path) as fp: + content = fp.read() + start = datetime.now() + stripped = strip_tags(content) + elapsed = datetime.now() - start + self.assertEqual(elapsed.seconds, 0) + self.assertIn("Test string that has not been stripped.", stripped) + self.assertNotIn("<", stripped) + + def test_strip_spaces_between_tags(self): + # Strings that should come out untouched. + items = (" ", " ", " ", " x") + for value in items: + with self.subTest(value=value): + self.check_output(strip_spaces_between_tags, value) + self.check_output(strip_spaces_between_tags, lazystr(value)) + + # Strings that have spaces to strip. + items = ( + (" ", ""), + ("

    hello

    \n

    world

    ", "

    hello

    world

    "), + ("\n

    \t

    \n

    \n", "\n

    \n"), + ) + for value, output in items: + with self.subTest(value=value, output=output): + self.check_output(strip_spaces_between_tags, value, output) + self.check_output(strip_spaces_between_tags, lazystr(value), output) + + def test_escapejs(self): + items = ( + ( + "\"double quotes\" and 'single quotes'", + "\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027", + ), + (r"\ : backslashes, too", "\\u005C : backslashes, too"), + ( + "and lots of whitespace: \r\n\t\v\f\b", + "and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008", + ), + ( + r"", + "\\u003Cscript\\u003Eand this\\u003C/script\\u003E", + ), + ( + "paragraph separator:\u2029and line separator:\u2028", + "paragraph separator:\\u2029and line separator:\\u2028", + ), + ("`", "\\u0060"), + ) + for value, output in items: + with self.subTest(value=value, output=output): + self.check_output(escapejs, value, output) + self.check_output(escapejs, lazystr(value), output) + + def test_json_script(self): + tests = ( + # "<", ">" and "&" are quoted inside JSON strings + ( + ( + "&<>", + '', + ) + ), + # "<", ">" and "&" are quoted inside JSON objects + ( + {"a": ""}, + '", + ), + # Lazy strings are quoted + ( + lazystr("&<>"), + '", + ), + ( + {"a": lazystr("")}, + '", + ), + ) + for arg, expected in tests: + with self.subTest(arg=arg): + self.assertEqual(json_script(arg, "test_id"), expected) + + def test_json_script_custom_encoder(self): + class CustomDjangoJSONEncoder(DjangoJSONEncoder): + def encode(self, o): + return '{"hello": "world"}' + + self.assertHTMLEqual( + json_script({}, encoder=CustomDjangoJSONEncoder), + '', + ) + + def test_json_script_without_id(self): + self.assertHTMLEqual( + json_script({"key": "value"}), + '', + ) + + def test_smart_urlquote(self): + items = ( + ("http://öäü.com/", "http://xn--4ca9at.com/"), + ("http://öäü.com/öäü/", "http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/"), + # Everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered + # safe as per RFC. 
+ ( + "http://example.com/path/öäü/", + "http://example.com/path/%C3%B6%C3%A4%C3%BC/", + ), + ("http://example.com/%C3%B6/ä/", "http://example.com/%C3%B6/%C3%A4/"), + ("http://example.com/?x=1&y=2+3&z=", "http://example.com/?x=1&y=2+3&z="), + ("http://example.com/?x=<>\"'", "http://example.com/?x=%3C%3E%22%27"), + ( + "http://example.com/?q=http://example.com/?x=1%26q=django", + "http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D" + "django", + ), + ( + "http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D" + "django", + "http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D" + "django", + ), + ("http://.www.f oo.bar/", "http://.www.f%20oo.bar/"), + ) + # IDNs are properly quoted + for value, output in items: + with self.subTest(value=value, output=output): + self.assertEqual(smart_urlquote(value), output) + + def test_conditional_escape(self): + s = "

    interop

    " + self.assertEqual(conditional_escape(s), "<h1>interop</h1>") + self.assertEqual(conditional_escape(mark_safe(s)), s) + self.assertEqual(conditional_escape(lazystr(mark_safe(s))), s) + + def test_html_safe(self): + @html_safe + class HtmlClass: + def __str__(self): + return "

    I'm a html class!

    " + + html_obj = HtmlClass() + self.assertTrue(hasattr(HtmlClass, "__html__")) + self.assertTrue(hasattr(html_obj, "__html__")) + self.assertEqual(str(html_obj), html_obj.__html__()) + + def test_html_safe_subclass(self): + class BaseClass: + def __html__(self): + # defines __html__ on its own + return "some html content" + + def __str__(self): + return "some non html content" + + @html_safe + class Subclass(BaseClass): + def __str__(self): + # overrides __str__ and is marked as html_safe + return "some html safe content" + + subclass_obj = Subclass() + self.assertEqual(str(subclass_obj), subclass_obj.__html__()) + + def test_html_safe_defines_html_error(self): + msg = "can't apply @html_safe to HtmlClass because it defines __html__()." + with self.assertRaisesMessage(ValueError, msg): + + @html_safe + class HtmlClass: + def __html__(self): + return "

    I'm a html class!

    " + + def test_html_safe_doesnt_define_str(self): + msg = "can't apply @html_safe to HtmlClass because it doesn't define __str__()." + with self.assertRaisesMessage(ValueError, msg): + + @html_safe + class HtmlClass: + pass + + def test_urlize(self): + tests = ( + ( + "Search for google.com/?q=! and see.", + 'Search for google.com/?q=! and ' + "see.", + ), + ( + "Search for google.com/?q=1<! and see.", + 'Search for google.com/?q=1<' + "! and see.", + ), + ( + lazystr("Search for google.com/?q=!"), + 'Search for google.com/?q=!', + ), + ("foo@example.com", 'foo@example.com'), + ) + for value, output in tests: + with self.subTest(value=value): + self.assertEqual(urlize(value), output) + + def test_urlize_unchanged_inputs(self): + tests = ( + ("a" + "@a" * 50000) + "a", # simple_email_re catastrophic test + ("a" + "." * 1000000) + "a", # trailing_punctuation catastrophic test + "foo@", + "@foo.com", + "foo@.example.com", + "foo@localhost", + "foo@localhost.", + ) + for value in tests: + with self.subTest(value=value): + self.assertEqual(urlize(value), value) diff --git a/testbed/django__django/tests/utils_tests/test_http.py b/testbed/django__django/tests/utils_tests/test_http.py new file mode 100644 index 0000000000000000000000000000000000000000..2290fe85fbbae1da9f0643afe7294ae5fad061ba --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_http.py @@ -0,0 +1,539 @@ +import platform +import unittest +from datetime import datetime, timezone +from unittest import mock + +from django.test import SimpleTestCase +from django.utils.datastructures import MultiValueDict +from django.utils.http import ( + base36_to_int, + content_disposition_header, + escape_leading_slashes, + http_date, + int_to_base36, + is_same_domain, + parse_etags, + parse_header_parameters, + parse_http_date, + quote_etag, + url_has_allowed_host_and_scheme, + urlencode, + urlsafe_base64_decode, + urlsafe_base64_encode, +) + + +class URLEncodeTests(SimpleTestCase): + cannot_encode_none_msg 
= ( + "Cannot encode None for key 'a' in a query string. Did you mean to " + "pass an empty string or omit the value?" + ) + + def test_tuples(self): + self.assertEqual(urlencode((("a", 1), ("b", 2), ("c", 3))), "a=1&b=2&c=3") + + def test_dict(self): + result = urlencode({"a": 1, "b": 2, "c": 3}) + # Dictionaries are treated as unordered. + self.assertIn( + result, + [ + "a=1&b=2&c=3", + "a=1&c=3&b=2", + "b=2&a=1&c=3", + "b=2&c=3&a=1", + "c=3&a=1&b=2", + "c=3&b=2&a=1", + ], + ) + + def test_dict_containing_sequence_not_doseq(self): + self.assertEqual(urlencode({"a": [1, 2]}, doseq=False), "a=%5B1%2C+2%5D") + + def test_dict_containing_tuple_not_doseq(self): + self.assertEqual(urlencode({"a": (1, 2)}, doseq=False), "a=%281%2C+2%29") + + def test_custom_iterable_not_doseq(self): + class IterableWithStr: + def __str__(self): + return "custom" + + def __iter__(self): + yield from range(0, 3) + + self.assertEqual(urlencode({"a": IterableWithStr()}, doseq=False), "a=custom") + + def test_dict_containing_sequence_doseq(self): + self.assertEqual(urlencode({"a": [1, 2]}, doseq=True), "a=1&a=2") + + def test_dict_containing_empty_sequence_doseq(self): + self.assertEqual(urlencode({"a": []}, doseq=True), "") + + def test_multivaluedict(self): + result = urlencode( + MultiValueDict( + { + "name": ["Adrian", "Simon"], + "position": ["Developer"], + } + ), + doseq=True, + ) + # MultiValueDicts are similarly unordered. 
+ self.assertIn( + result, + [ + "name=Adrian&name=Simon&position=Developer", + "position=Developer&name=Adrian&name=Simon", + ], + ) + + def test_dict_with_bytes_values(self): + self.assertEqual(urlencode({"a": b"abc"}, doseq=True), "a=abc") + + def test_dict_with_sequence_of_bytes(self): + self.assertEqual( + urlencode({"a": [b"spam", b"eggs", b"bacon"]}, doseq=True), + "a=spam&a=eggs&a=bacon", + ) + + def test_dict_with_bytearray(self): + self.assertEqual(urlencode({"a": bytearray(range(2))}, doseq=True), "a=0&a=1") + + def test_generator(self): + self.assertEqual(urlencode({"a": range(2)}, doseq=True), "a=0&a=1") + self.assertEqual(urlencode({"a": range(2)}, doseq=False), "a=range%280%2C+2%29") + + def test_none(self): + with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg): + urlencode({"a": None}) + + def test_none_in_sequence(self): + with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg): + urlencode({"a": [None]}, doseq=True) + + def test_none_in_generator(self): + def gen(): + yield None + + with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg): + urlencode({"a": gen()}, doseq=True) + + +class Base36IntTests(SimpleTestCase): + def test_roundtrip(self): + for n in [0, 1, 1000, 1000000]: + self.assertEqual(n, base36_to_int(int_to_base36(n))) + + def test_negative_input(self): + with self.assertRaisesMessage(ValueError, "Negative base36 conversion input."): + int_to_base36(-1) + + def test_to_base36_errors(self): + for n in ["1", "foo", {1: 2}, (1, 2, 3), 3.141]: + with self.assertRaises(TypeError): + int_to_base36(n) + + def test_invalid_literal(self): + for n in ["#", " "]: + with self.assertRaisesMessage( + ValueError, "invalid literal for int() with base 36: '%s'" % n + ): + base36_to_int(n) + + def test_input_too_large(self): + with self.assertRaisesMessage(ValueError, "Base36 input too large"): + base36_to_int("1" * 14) + + def test_to_int_errors(self): + for n in [123, {1: 2}, (1, 2, 3), 3.141]: + with 
self.assertRaises(TypeError): + base36_to_int(n) + + def test_values(self): + for n, b36 in [(0, "0"), (1, "1"), (42, "16"), (818469960, "django")]: + self.assertEqual(int_to_base36(n), b36) + self.assertEqual(base36_to_int(b36), n) + + +class URLHasAllowedHostAndSchemeTests(unittest.TestCase): + def test_bad_urls(self): + bad_urls = ( + "http://example.com", + "http:///example.com", + "https://example.com", + "ftp://example.com", + r"\\example.com", + r"\\\example.com", + r"/\\/example.com", + r"\\\example.com", + r"\\example.com", + r"\\//example.com", + r"/\/example.com", + r"\/example.com", + r"/\example.com", + "http:///example.com", + r"http:/\//example.com", + r"http:\/example.com", + r"http:/\example.com", + 'javascript:alert("XSS")', + "\njavascript:alert(x)", + "java\nscript:alert(x)", + "\x08//example.com", + r"http://otherserver\@example.com", + r"http:\\testserver\@example.com", + r"http://testserver\me:pass@example.com", + r"http://testserver\@example.com", + r"http:\\testserver\confirm\me@example.com", + "http:999999999", + "ftp:9999999999", + "\n", + "http://[2001:cdba:0000:0000:0000:0000:3257:9652/", + "http://2001:cdba:0000:0000:0000:0000:3257:9652]/", + ) + for bad_url in bad_urls: + with self.subTest(url=bad_url): + self.assertIs( + url_has_allowed_host_and_scheme( + bad_url, allowed_hosts={"testserver", "testserver2"} + ), + False, + ) + + def test_good_urls(self): + good_urls = ( + "/view/?param=http://example.com", + "/view/?param=https://example.com", + "/view?param=ftp://example.com", + "view/?param=//example.com", + "https://testserver/", + "HTTPS://testserver/", + "//testserver/", + "http://testserver/confirm?email=me@example.com", + "/url%20with%20spaces/", + "path/http:2222222222", + ) + for good_url in good_urls: + with self.subTest(url=good_url): + self.assertIs( + url_has_allowed_host_and_scheme( + good_url, allowed_hosts={"otherserver", "testserver"} + ), + True, + ) + + def test_basic_auth(self): + # Valid basic auth credentials 
are allowed. + self.assertIs( + url_has_allowed_host_and_scheme( + r"http://user:pass@testserver/", allowed_hosts={"user:pass@testserver"} + ), + True, + ) + + def test_no_allowed_hosts(self): + # A path without host is allowed. + self.assertIs( + url_has_allowed_host_and_scheme( + "/confirm/me@example.com", allowed_hosts=None + ), + True, + ) + # Basic auth without host is not allowed. + self.assertIs( + url_has_allowed_host_and_scheme( + r"http://testserver\@example.com", allowed_hosts=None + ), + False, + ) + + def test_allowed_hosts_str(self): + self.assertIs( + url_has_allowed_host_and_scheme( + "http://good.com/good", allowed_hosts="good.com" + ), + True, + ) + self.assertIs( + url_has_allowed_host_and_scheme( + "http://good.co/evil", allowed_hosts="good.com" + ), + False, + ) + + def test_secure_param_https_urls(self): + secure_urls = ( + "https://example.com/p", + "HTTPS://example.com/p", + "/view/?param=http://example.com", + ) + for url in secure_urls: + with self.subTest(url=url): + self.assertIs( + url_has_allowed_host_and_scheme( + url, allowed_hosts={"example.com"}, require_https=True + ), + True, + ) + + def test_secure_param_non_https_urls(self): + insecure_urls = ( + "http://example.com/p", + "ftp://example.com/p", + "//example.com/p", + ) + for url in insecure_urls: + with self.subTest(url=url): + self.assertIs( + url_has_allowed_host_and_scheme( + url, allowed_hosts={"example.com"}, require_https=True + ), + False, + ) + + +class URLSafeBase64Tests(unittest.TestCase): + def test_roundtrip(self): + bytestring = b"foo" + encoded = urlsafe_base64_encode(bytestring) + decoded = urlsafe_base64_decode(encoded) + self.assertEqual(bytestring, decoded) + + +class IsSameDomainTests(unittest.TestCase): + def test_good(self): + for pair in ( + ("example.com", "example.com"), + ("example.com", ".example.com"), + ("foo.example.com", ".example.com"), + ("example.com:8888", "example.com:8888"), + ("example.com:8888", ".example.com:8888"), + 
("foo.example.com:8888", ".example.com:8888"), + ): + self.assertIs(is_same_domain(*pair), True) + + def test_bad(self): + for pair in ( + ("example2.com", "example.com"), + ("foo.example.com", "example.com"), + ("example.com:9999", "example.com:8888"), + ("foo.example.com:8888", ""), + ): + self.assertIs(is_same_domain(*pair), False) + + +class ETagProcessingTests(unittest.TestCase): + def test_parsing(self): + self.assertEqual( + parse_etags(r'"" , "etag", "e\\tag", W/"weak"'), + ['""', '"etag"', r'"e\\tag"', 'W/"weak"'], + ) + self.assertEqual(parse_etags("*"), ["*"]) + + # Ignore RFC 2616 ETags that are invalid according to RFC 9110. + self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"']) + + def test_quoting(self): + self.assertEqual(quote_etag("etag"), '"etag"') # unquoted + self.assertEqual(quote_etag('"etag"'), '"etag"') # quoted + self.assertEqual(quote_etag('W/"etag"'), 'W/"etag"') # quoted, weak + + +class HttpDateProcessingTests(unittest.TestCase): + def test_http_date(self): + t = 1167616461.0 + self.assertEqual(http_date(t), "Mon, 01 Jan 2007 01:54:21 GMT") + + def test_parsing_rfc1123(self): + parsed = parse_http_date("Sun, 06 Nov 1994 08:49:37 GMT") + self.assertEqual( + datetime.fromtimestamp(parsed, timezone.utc), + datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc), + ) + + @unittest.skipIf(platform.architecture()[0] == "32bit", "The Year 2038 problem.") + @mock.patch("django.utils.http.datetime.datetime") + def test_parsing_rfc850(self, mocked_datetime): + mocked_datetime.side_effect = datetime + mocked_datetime.now = mock.Mock() + now_1 = datetime(2019, 11, 6, 8, 49, 37, tzinfo=timezone.utc) + now_2 = datetime(2020, 11, 6, 8, 49, 37, tzinfo=timezone.utc) + now_3 = datetime(2048, 11, 6, 8, 49, 37, tzinfo=timezone.utc) + tests = ( + ( + now_1, + "Tuesday, 31-Dec-69 08:49:37 GMT", + datetime(2069, 12, 31, 8, 49, 37, tzinfo=timezone.utc), + ), + ( + now_1, + "Tuesday, 10-Nov-70 08:49:37 GMT", + datetime(1970, 11, 10, 8, 49, 37, 
tzinfo=timezone.utc), + ), + ( + now_1, + "Sunday, 06-Nov-94 08:49:37 GMT", + datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc), + ), + ( + now_2, + "Wednesday, 31-Dec-70 08:49:37 GMT", + datetime(2070, 12, 31, 8, 49, 37, tzinfo=timezone.utc), + ), + ( + now_2, + "Friday, 31-Dec-71 08:49:37 GMT", + datetime(1971, 12, 31, 8, 49, 37, tzinfo=timezone.utc), + ), + ( + now_3, + "Sunday, 31-Dec-00 08:49:37 GMT", + datetime(2000, 12, 31, 8, 49, 37, tzinfo=timezone.utc), + ), + ( + now_3, + "Friday, 31-Dec-99 08:49:37 GMT", + datetime(1999, 12, 31, 8, 49, 37, tzinfo=timezone.utc), + ), + ) + for now, rfc850str, expected_date in tests: + with self.subTest(rfc850str=rfc850str): + mocked_datetime.now.return_value = now + parsed = parse_http_date(rfc850str) + mocked_datetime.now.assert_called_once_with(tz=timezone.utc) + self.assertEqual( + datetime.fromtimestamp(parsed, timezone.utc), + expected_date, + ) + mocked_datetime.reset_mock() + + def test_parsing_asctime(self): + parsed = parse_http_date("Sun Nov 6 08:49:37 1994") + self.assertEqual( + datetime.fromtimestamp(parsed, timezone.utc), + datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc), + ) + + def test_parsing_asctime_nonascii_digits(self): + """Non-ASCII unicode decimals raise an error.""" + with self.assertRaises(ValueError): + parse_http_date("Sun Nov 6 08:49:37 1994") + with self.assertRaises(ValueError): + parse_http_date("Sun Nov 12 08:49:37 1994") + + def test_parsing_year_less_than_70(self): + parsed = parse_http_date("Sun Nov 6 08:49:37 0037") + self.assertEqual( + datetime.fromtimestamp(parsed, timezone.utc), + datetime(2037, 11, 6, 8, 49, 37, tzinfo=timezone.utc), + ) + + +class EscapeLeadingSlashesTests(unittest.TestCase): + def test(self): + tests = ( + ("//example.com", "/%2Fexample.com"), + ("//", "/%2F"), + ) + for url, expected in tests: + with self.subTest(url=url): + self.assertEqual(escape_leading_slashes(url), expected) + + +class ParseHeaderParameterTests(unittest.TestCase): + def 
test_basic(self): + tests = [ + ("text/plain", ("text/plain", {})), + ("text/vnd.just.made.this.up ; ", ("text/vnd.just.made.this.up", {})), + ("text/plain;charset=us-ascii", ("text/plain", {"charset": "us-ascii"})), + ( + 'text/plain ; charset="us-ascii"', + ("text/plain", {"charset": "us-ascii"}), + ), + ( + 'text/plain ; charset="us-ascii"; another=opt', + ("text/plain", {"charset": "us-ascii", "another": "opt"}), + ), + ( + 'attachment; filename="silly.txt"', + ("attachment", {"filename": "silly.txt"}), + ), + ( + 'attachment; filename="strange;name"', + ("attachment", {"filename": "strange;name"}), + ), + ( + 'attachment; filename="strange;name";size=123;', + ("attachment", {"filename": "strange;name", "size": "123"}), + ), + ( + 'form-data; name="files"; filename="fo\\"o;bar"', + ("form-data", {"name": "files", "filename": 'fo"o;bar'}), + ), + ] + for header, expected in tests: + with self.subTest(header=header): + self.assertEqual(parse_header_parameters(header), expected) + + def test_rfc2231_parsing(self): + test_data = ( + ( + "Content-Type: application/x-stuff; " + "title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A", + "This is ***fun***", + ), + ( + "Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html", + "foo-ä.html", + ), + ( + "Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html", + "foo-ä.html", + ), + ) + for raw_line, expected_title in test_data: + parsed = parse_header_parameters(raw_line) + self.assertEqual(parsed[1]["title"], expected_title) + + def test_rfc2231_wrong_title(self): + """ + Test wrongly formatted RFC 2231 headers (missing double single quotes). + Parsing should not crash (#24209). 
+ """ + test_data = ( + ( + "Content-Type: application/x-stuff; " + "title*='This%20is%20%2A%2A%2Afun%2A%2A%2A", + "'This%20is%20%2A%2A%2Afun%2A%2A%2A", + ), + ("Content-Type: application/x-stuff; title*='foo.html", "'foo.html"), + ("Content-Type: application/x-stuff; title*=bar.html", "bar.html"), + ) + for raw_line, expected_title in test_data: + parsed = parse_header_parameters(raw_line) + self.assertEqual(parsed[1]["title"], expected_title) + + +class ContentDispositionHeaderTests(unittest.TestCase): + def test_basic(self): + tests = ( + ((False, None), None), + ((False, "example"), 'inline; filename="example"'), + ((True, None), "attachment"), + ((True, "example"), 'attachment; filename="example"'), + ( + (True, '"example" file\\name'), + 'attachment; filename="\\"example\\" file\\\\name"', + ), + ((True, "espécimen"), "attachment; filename*=utf-8''esp%C3%A9cimen"), + ( + (True, '"espécimen" filename'), + "attachment; filename*=utf-8''%22esp%C3%A9cimen%22%20filename", + ), + ) + + for (is_attachment, filename), expected in tests: + with self.subTest(is_attachment=is_attachment, filename=filename): + self.assertEqual( + content_disposition_header(is_attachment, filename), expected + ) diff --git a/testbed/django__django/tests/utils_tests/test_inspect.py b/testbed/django__django/tests/utils_tests/test_inspect.py new file mode 100644 index 0000000000000000000000000000000000000000..b8359c25087cbb4d85d635f015a274407d344c13 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_inspect.py @@ -0,0 +1,102 @@ +import unittest + +from django.utils import inspect + + +class Person: + def no_arguments(self): + return None + + def one_argument(self, something): + return something + + def just_args(self, *args): + return args + + def all_kinds(self, name, address="home", age=25, *args, **kwargs): + return kwargs + + @classmethod + def cls_all_kinds(cls, name, address="home", age=25, *args, **kwargs): + return kwargs + + +class 
TestInspectMethods(unittest.TestCase): + def test_get_callable_parameters(self): + self.assertIs( + inspect._get_callable_parameters(Person.no_arguments), + inspect._get_callable_parameters(Person.no_arguments), + ) + self.assertIs( + inspect._get_callable_parameters(Person().no_arguments), + inspect._get_callable_parameters(Person().no_arguments), + ) + + def test_get_func_full_args_no_arguments(self): + self.assertEqual(inspect.get_func_full_args(Person.no_arguments), []) + self.assertEqual(inspect.get_func_full_args(Person().no_arguments), []) + + def test_get_func_full_args_one_argument(self): + self.assertEqual( + inspect.get_func_full_args(Person.one_argument), [("something",)] + ) + self.assertEqual( + inspect.get_func_full_args(Person().one_argument), + [("something",)], + ) + + def test_get_func_full_args_all_arguments_method(self): + arguments = [ + ("name",), + ("address", "home"), + ("age", 25), + ("*args",), + ("**kwargs",), + ] + self.assertEqual(inspect.get_func_full_args(Person.all_kinds), arguments) + self.assertEqual(inspect.get_func_full_args(Person().all_kinds), arguments) + + def test_get_func_full_args_all_arguments_classmethod(self): + arguments = [ + ("name",), + ("address", "home"), + ("age", 25), + ("*args",), + ("**kwargs",), + ] + self.assertEqual(inspect.get_func_full_args(Person.cls_all_kinds), arguments) + self.assertEqual(inspect.get_func_full_args(Person().cls_all_kinds), arguments) + + def test_func_accepts_var_args_has_var_args(self): + self.assertIs(inspect.func_accepts_var_args(Person.just_args), True) + self.assertIs(inspect.func_accepts_var_args(Person().just_args), True) + + def test_func_accepts_var_args_no_var_args(self): + self.assertIs(inspect.func_accepts_var_args(Person.one_argument), False) + self.assertIs(inspect.func_accepts_var_args(Person().one_argument), False) + + def test_method_has_no_args(self): + self.assertIs(inspect.method_has_no_args(Person.no_arguments), True) + 
self.assertIs(inspect.method_has_no_args(Person().no_arguments), True) + self.assertIs(inspect.method_has_no_args(Person.one_argument), False) + self.assertIs(inspect.method_has_no_args(Person().one_argument), False) + + def test_func_supports_parameter(self): + self.assertIs( + inspect.func_supports_parameter(Person.all_kinds, "address"), True + ) + self.assertIs( + inspect.func_supports_parameter(Person().all_kinds, "address"), + True, + ) + self.assertIs(inspect.func_supports_parameter(Person.all_kinds, "zone"), False) + self.assertIs( + inspect.func_supports_parameter(Person().all_kinds, "zone"), + False, + ) + + def test_func_accepts_kwargs(self): + self.assertIs(inspect.func_accepts_kwargs(Person.just_args), False) + self.assertIs(inspect.func_accepts_kwargs(Person().just_args), False) + self.assertIs(inspect.func_accepts_kwargs(Person.all_kinds), True) + self.assertIs(inspect.func_accepts_kwargs(Person().just_args), False) diff --git a/testbed/django__django/tests/utils_tests/test_ipv6.py b/testbed/django__django/tests/utils_tests/test_ipv6.py new file mode 100644 index 0000000000000000000000000000000000000000..bf78ed91c08f8451802bfd5eae59c3b0f2f9a76c --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_ipv6.py @@ -0,0 +1,66 @@ +import unittest + +from django.utils.ipv6 import clean_ipv6_address, is_valid_ipv6_address + + +class TestUtilsIPv6(unittest.TestCase): + def test_validates_correct_plain_address(self): + self.assertTrue(is_valid_ipv6_address("fe80::223:6cff:fe8a:2e8a")) + self.assertTrue(is_valid_ipv6_address("2a02::223:6cff:fe8a:2e8a")) + self.assertTrue(is_valid_ipv6_address("1::2:3:4:5:6:7")) + self.assertTrue(is_valid_ipv6_address("::")) + self.assertTrue(is_valid_ipv6_address("::a")) + self.assertTrue(is_valid_ipv6_address("2::")) + + def test_validates_correct_with_v4mapping(self): + self.assertTrue(is_valid_ipv6_address("::ffff:254.42.16.14")) + self.assertTrue(is_valid_ipv6_address("::ffff:0a0a:0a0a")) + + def 
test_validates_incorrect_plain_address(self): + self.assertFalse(is_valid_ipv6_address("foo")) + self.assertFalse(is_valid_ipv6_address("127.0.0.1")) + self.assertFalse(is_valid_ipv6_address("12345::")) + self.assertFalse(is_valid_ipv6_address("1::2:3::4")) + self.assertFalse(is_valid_ipv6_address("1::zzz")) + self.assertFalse(is_valid_ipv6_address("1::2:3:4:5:6:7:8")) + self.assertFalse(is_valid_ipv6_address("1:2")) + self.assertFalse(is_valid_ipv6_address("1:::2")) + self.assertFalse(is_valid_ipv6_address("fe80::223: 6cff:fe8a:2e8a")) + self.assertFalse(is_valid_ipv6_address("2a02::223:6cff :fe8a:2e8a")) + + def test_validates_incorrect_with_v4mapping(self): + self.assertFalse(is_valid_ipv6_address("::ffff:999.42.16.14")) + self.assertFalse(is_valid_ipv6_address("::ffff:zzzz:0a0a")) + # The ::1.2.3.4 format used to be valid but was deprecated + # in RFC 4291 section 2.5.5.1. + self.assertTrue(is_valid_ipv6_address("::254.42.16.14")) + self.assertTrue(is_valid_ipv6_address("::0a0a:0a0a")) + self.assertFalse(is_valid_ipv6_address("::999.42.16.14")) + self.assertFalse(is_valid_ipv6_address("::zzzz:0a0a")) + + def test_cleans_plain_address(self): + self.assertEqual(clean_ipv6_address("DEAD::0:BEEF"), "dead::beef") + self.assertEqual( + clean_ipv6_address("2001:000:a:0000:0:fe:fe:beef"), "2001:0:a::fe:fe:beef" + ) + self.assertEqual( + clean_ipv6_address("2001::a:0000:0:fe:fe:beef"), "2001:0:a::fe:fe:beef" + ) + + def test_cleans_with_v4_mapping(self): + self.assertEqual(clean_ipv6_address("::ffff:0a0a:0a0a"), "::ffff:10.10.10.10") + self.assertEqual(clean_ipv6_address("::ffff:1234:1234"), "::ffff:18.52.18.52") + self.assertEqual(clean_ipv6_address("::ffff:18.52.18.52"), "::ffff:18.52.18.52") + self.assertEqual(clean_ipv6_address("::ffff:0.52.18.52"), "::ffff:0.52.18.52") + self.assertEqual(clean_ipv6_address("::ffff:0.0.0.0"), "::ffff:0.0.0.0") + + def test_unpacks_ipv4(self): + self.assertEqual( + clean_ipv6_address("::ffff:0a0a:0a0a", unpack_ipv4=True), 
"10.10.10.10" + ) + self.assertEqual( + clean_ipv6_address("::ffff:1234:1234", unpack_ipv4=True), "18.52.18.52" + ) + self.assertEqual( + clean_ipv6_address("::ffff:18.52.18.52", unpack_ipv4=True), "18.52.18.52" + ) diff --git a/testbed/django__django/tests/utils_tests/test_jslex.py b/testbed/django__django/tests/utils_tests/test_jslex.py new file mode 100644 index 0000000000000000000000000000000000000000..ee13eb4d64e6b5aa38de654bef818d838ccbc295 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_jslex.py @@ -0,0 +1,400 @@ +"""Tests for jslex.""" +# originally from https://bitbucket.org/ned/jslex + +from django.test import SimpleTestCase +from django.utils.jslex import JsLexer, prepare_js_for_gettext + + +class JsTokensTest(SimpleTestCase): + LEX_CASES = [ + # ids + ("a ABC $ _ a123", ["id a", "id ABC", "id $", "id _", "id a123"]), + ( + "\\u1234 abc\\u0020 \\u0065_\\u0067", + ["id \\u1234", "id abc\\u0020", "id \\u0065_\\u0067"], + ), + # numbers + ( + "123 1.234 0.123e-3 0 1E+40 1e1 .123", + [ + "dnum 123", + "dnum 1.234", + "dnum 0.123e-3", + "dnum 0", + "dnum 1E+40", + "dnum 1e1", + "dnum .123", + ], + ), + ("0x1 0xabCD 0XABcd", ["hnum 0x1", "hnum 0xabCD", "hnum 0XABcd"]), + ("010 0377 090", ["onum 010", "onum 0377", "dnum 0", "dnum 90"]), + ("0xa123ghi", ["hnum 0xa123", "id ghi"]), + # keywords + ( + "function Function FUNCTION", + ["keyword function", "id Function", "id FUNCTION"], + ), + ( + "const constructor in inherits", + ["keyword const", "id constructor", "keyword in", "id inherits"], + ), + ("true true_enough", ["reserved true", "id true_enough"]), + # strings + (""" 'hello' "hello" """, ["string 'hello'", 'string "hello"']), + ( + r""" 'don\'t' "don\"t" '"' "'" '\'' "\"" """, + [ + r"""string 'don\'t'""", + r'''string "don\"t"''', + r"""string '"'""", + r'''string "'"''', + r"""string '\''""", + r'''string "\""''', + ], + ), + (r'"ƃuıxǝ⅂ ʇdıɹɔsɐʌɐſ\""', [r'string "ƃuıxǝ⅂ ʇdıɹɔsɐʌɐſ\""']), + # comments + ("a//b", ["id a", "linecomment 
//b"]), + ( + "/****/a/=2//hello", + ["comment /****/", "id a", "punct /=", "dnum 2", "linecomment //hello"], + ), + ( + "/*\n * Header\n */\na=1;", + ["comment /*\n * Header\n */", "id a", "punct =", "dnum 1", "punct ;"], + ), + # punctuation + ("a+++b", ["id a", "punct ++", "punct +", "id b"]), + # regex + (r"a=/a*/,1", ["id a", "punct =", "regex /a*/", "punct ,", "dnum 1"]), + (r"a=/a*[^/]+/,1", ["id a", "punct =", "regex /a*[^/]+/", "punct ,", "dnum 1"]), + (r"a=/a*\[^/,1", ["id a", "punct =", r"regex /a*\[^/", "punct ,", "dnum 1"]), + (r"a=/\//,1", ["id a", "punct =", r"regex /\//", "punct ,", "dnum 1"]), + # next two are from https://www-archive.mozilla.org/js/language/js20-2002-04/rationale/syntax.html#regular-expressions # NOQA + ( + 'for (var x = a in foo && "" || mot ? z:/x:3;x<5;y"', + "punct ||", + "id mot", + "punct ?", + "id z", + "punct :", + "regex /x:3;x<5;y" || mot ? z/x:3;x<5;y"', + "punct ||", + "id mot", + "punct ?", + "id z", + "punct /", + "id x", + "punct :", + "dnum 3", + "punct ;", + "id x", + "punct <", + "dnum 5", + "punct ;", + "id y", + "punct <", + "regex /g/i", + "punct )", + "punct {", + "id xyz", + "punct (", + "id x", + "punct ++", + "punct )", + "punct ;", + "punct }", + ], + ), + # Various "illegal" regexes that are valid according to the std. 
+ ( + r"""/????/, /++++/, /[----]/ """, + ["regex /????/", "punct ,", "regex /++++/", "punct ,", "regex /[----]/"], + ), + # Stress cases from https://stackoverflow.com/questions/5533925/what-javascript-constructs-does-jslex-incorrectly-lex/5573409#5573409 # NOQA + (r"""/\[/""", [r"""regex /\[/"""]), + (r"""/[i]/""", [r"""regex /[i]/"""]), + (r"""/[\]]/""", [r"""regex /[\]]/"""]), + (r"""/a[\]]/""", [r"""regex /a[\]]/"""]), + (r"""/a[\]]b/""", [r"""regex /a[\]]b/"""]), + (r"""/[\]/]/gi""", [r"""regex /[\]/]/gi"""]), + (r"""/\[[^\]]+\]/gi""", [r"""regex /\[[^\]]+\]/gi"""]), + ( + r""" + rexl.re = { + NAME: /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/, + UNQUOTED_LITERAL: /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/, + QUOTED_LITERAL: /^'(?:[^']|'')*'/, + NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/, + SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/ + }; + """, # NOQA + [ + "id rexl", + "punct .", + "id re", + "punct =", + "punct {", + "id NAME", + "punct :", + r"""regex /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/""", + "punct ,", + "id UNQUOTED_LITERAL", + "punct :", + r"""regex /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""", + "punct ,", + "id QUOTED_LITERAL", + "punct :", + r"""regex /^'(?:[^']|'')*'/""", + "punct ,", + "id NUMERIC_LITERAL", + "punct :", + r"""regex /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", + "punct ,", + "id SYMBOL", + "punct :", + r"""regex /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""", # NOQA + "punct }", + "punct ;", + ], + ), + ( + r""" + rexl.re = { + NAME: /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/, + UNQUOTED_LITERAL: /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/, + QUOTED_LITERAL: /^'(?:[^']|'')*'/, + NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/, + SYMBOL: 
/^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/ + }; + str = '"'; + """, # NOQA + [ + "id rexl", + "punct .", + "id re", + "punct =", + "punct {", + "id NAME", + "punct :", + r"""regex /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/""", + "punct ,", + "id UNQUOTED_LITERAL", + "punct :", + r"""regex /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""", + "punct ,", + "id QUOTED_LITERAL", + "punct :", + r"""regex /^'(?:[^']|'')*'/""", + "punct ,", + "id NUMERIC_LITERAL", + "punct :", + r"""regex /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", + "punct ,", + "id SYMBOL", + "punct :", + r"""regex /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""", # NOQA + "punct }", + "punct ;", + "id str", + "punct =", + """string '"'""", + "punct ;", + ], + ), + ( + r' this._js = "e.str(\"" + this.value.replace(/\\/g, "\\\\")' + r'.replace(/"/g, "\\\"") + "\")"; ', + [ + "keyword this", + "punct .", + "id _js", + "punct =", + r'''string "e.str(\""''', + "punct +", + "keyword this", + "punct .", + "id value", + "punct .", + "id replace", + "punct (", + r"regex /\\/g", + "punct ,", + r'string "\\\\"', + "punct )", + "punct .", + "id replace", + "punct (", + r'regex /"/g', + "punct ,", + r'string "\\\""', + "punct )", + "punct +", + r'string "\")"', + "punct ;", + ], + ), + ] + + +def make_function(input, toks): + def test_func(self): + lexer = JsLexer() + result = [ + "%s %s" % (name, tok) for name, tok in lexer.lex(input) if name != "ws" + ] + self.assertEqual(result, toks) + + return test_func + + +for i, (input, toks) in enumerate(JsTokensTest.LEX_CASES): + setattr(JsTokensTest, "test_case_%d" % i, make_function(input, toks)) + + +GETTEXT_CASES = ( + ( + r""" + a = 1; /* /[0-9]+/ */ + b = 0x2a0b / 1; // /[0-9]+/ + c = 3; + """, + r""" + a = 1; /* /[0-9]+/ */ + b = 0x2a0b / 1; // /[0-9]+/ + c = 3; + """, + ), + ( + r""" + a = 1.234e-5; + /* + * /[0-9+/ + */ + b = 
.0123; + """, + r""" + a = 1.234e-5; + /* + * /[0-9+/ + */ + b = .0123; + """, + ), + ( + r""" + x = y / z; + alert(gettext("hello")); + x /= 3; + """, + r""" + x = y / z; + alert(gettext("hello")); + x /= 3; + """, + ), + ( + r""" + s = "Hello \"th/foo/ere\""; + s = 'He\x23llo \'th/foo/ere\''; + s = 'slash quote \", just quote "'; + """, + r""" + s = "Hello \"th/foo/ere\""; + s = "He\x23llo \'th/foo/ere\'"; + s = "slash quote \", just quote \""; + """, + ), + ( + r""" + s = "Line continuation\ + continued /hello/ still the string";/hello/; + """, + r""" + s = "Line continuation\ + continued /hello/ still the string";"REGEX"; + """, + ), + ( + r""" + var regex = /pattern/; + var regex2 = /matter/gm; + var regex3 = /[*/]+/gm.foo("hey"); + """, + r""" + var regex = "REGEX"; + var regex2 = "REGEX"; + var regex3 = "REGEX".foo("hey"); + """, + ), + ( + r""" + for (var x = a in foo && "" || mot ? z:/x:3;x<5;y" || mot ? z/x:3;x<5;y" || mot ? z:"REGEX"/i) {xyz(x++);} + for (var x = a in foo && "" || mot ? 
z/x:3;x<5;y<"REGEX") {xyz(x++);} + """, + ), + ( + """ + \\u1234xyz = gettext('Hello there'); + """, + r""" + Uu1234xyz = gettext("Hello there"); + """, + ), +) + + +class JsToCForGettextTest(SimpleTestCase): + pass + + +def make_function(js, c): + def test_func(self): + self.assertEqual(prepare_js_for_gettext(js), c) + + return test_func + + +for i, pair in enumerate(GETTEXT_CASES): + setattr(JsToCForGettextTest, "test_case_%d" % i, make_function(*pair)) diff --git a/testbed/django__django/tests/utils_tests/test_lazyobject.py b/testbed/django__django/tests/utils_tests/test_lazyobject.py new file mode 100644 index 0000000000000000000000000000000000000000..134ae77750fb2f3a1106d722980291aba1b85584 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_lazyobject.py @@ -0,0 +1,508 @@ +import copy +import pickle +import sys +import warnings +from unittest import TestCase + +from django.utils.functional import LazyObject, SimpleLazyObject, empty + +from .models import Category, CategoryInfo + + +class Foo: + """ + A simple class with just one attribute. + """ + + foo = "bar" + + def __eq__(self, other): + return self.foo == other.foo + + +class LazyObjectTestCase(TestCase): + def lazy_wrap(self, wrapped_object): + """ + Wrap the given object into a LazyObject + """ + + class AdHocLazyObject(LazyObject): + def _setup(self): + self._wrapped = wrapped_object + + return AdHocLazyObject() + + def test_getattribute(self): + """ + Proxy methods don't exist on wrapped objects unless they're set. 
+ """ + attrs = [ + "__getitem__", + "__setitem__", + "__delitem__", + "__iter__", + "__len__", + "__contains__", + ] + foo = Foo() + obj = self.lazy_wrap(foo) + for attr in attrs: + with self.subTest(attr): + self.assertFalse(hasattr(obj, attr)) + setattr(foo, attr, attr) + obj_with_attr = self.lazy_wrap(foo) + self.assertTrue(hasattr(obj_with_attr, attr)) + self.assertEqual(getattr(obj_with_attr, attr), attr) + + def test_getattr(self): + obj = self.lazy_wrap(Foo()) + self.assertEqual(obj.foo, "bar") + + def test_getattr_falsey(self): + class Thing: + def __getattr__(self, key): + return [] + + obj = self.lazy_wrap(Thing()) + self.assertEqual(obj.main, []) + + def test_setattr(self): + obj = self.lazy_wrap(Foo()) + obj.foo = "BAR" + obj.bar = "baz" + self.assertEqual(obj.foo, "BAR") + self.assertEqual(obj.bar, "baz") + + def test_setattr2(self): + # Same as test_setattr but in reversed order + obj = self.lazy_wrap(Foo()) + obj.bar = "baz" + obj.foo = "BAR" + self.assertEqual(obj.foo, "BAR") + self.assertEqual(obj.bar, "baz") + + def test_delattr(self): + obj = self.lazy_wrap(Foo()) + obj.bar = "baz" + self.assertEqual(obj.bar, "baz") + del obj.bar + with self.assertRaises(AttributeError): + obj.bar + + def test_cmp(self): + obj1 = self.lazy_wrap("foo") + obj2 = self.lazy_wrap("bar") + obj3 = self.lazy_wrap("foo") + self.assertEqual(obj1, "foo") + self.assertEqual(obj1, obj3) + self.assertNotEqual(obj1, obj2) + self.assertNotEqual(obj1, "bar") + + def test_lt(self): + obj1 = self.lazy_wrap(1) + obj2 = self.lazy_wrap(2) + self.assertLess(obj1, obj2) + + def test_gt(self): + obj1 = self.lazy_wrap(1) + obj2 = self.lazy_wrap(2) + self.assertGreater(obj2, obj1) + + def test_bytes(self): + obj = self.lazy_wrap(b"foo") + self.assertEqual(bytes(obj), b"foo") + + def test_text(self): + obj = self.lazy_wrap("foo") + self.assertEqual(str(obj), "foo") + + def test_bool(self): + # Refs #21840 + for f in [False, 0, (), {}, [], None, set()]: + self.assertFalse(self.lazy_wrap(f)) 
+ for t in [True, 1, (1,), {1: 2}, [1], object(), {1}]: + self.assertTrue(t) + + def test_dir(self): + obj = self.lazy_wrap("foo") + self.assertEqual(dir(obj), dir("foo")) + + def test_len(self): + for seq in ["asd", [1, 2, 3], {"a": 1, "b": 2, "c": 3}]: + obj = self.lazy_wrap(seq) + self.assertEqual(len(obj), 3) + + def test_class(self): + self.assertIsInstance(self.lazy_wrap(42), int) + + class Bar(Foo): + pass + + self.assertIsInstance(self.lazy_wrap(Bar()), Foo) + + def test_hash(self): + obj = self.lazy_wrap("foo") + d = {obj: "bar"} + self.assertIn("foo", d) + self.assertEqual(d["foo"], "bar") + + def test_contains(self): + test_data = [ + ("c", "abcde"), + (2, [1, 2, 3]), + ("a", {"a": 1, "b": 2, "c": 3}), + (2, {1, 2, 3}), + ] + for needle, haystack in test_data: + self.assertIn(needle, self.lazy_wrap(haystack)) + + # __contains__ doesn't work when the haystack is a string and the + # needle a LazyObject. + for needle_haystack in test_data[1:]: + self.assertIn(self.lazy_wrap(needle), haystack) + self.assertIn(self.lazy_wrap(needle), self.lazy_wrap(haystack)) + + def test_getitem(self): + obj_list = self.lazy_wrap([1, 2, 3]) + obj_dict = self.lazy_wrap({"a": 1, "b": 2, "c": 3}) + + self.assertEqual(obj_list[0], 1) + self.assertEqual(obj_list[-1], 3) + self.assertEqual(obj_list[1:2], [2]) + + self.assertEqual(obj_dict["b"], 2) + + with self.assertRaises(IndexError): + obj_list[3] + + with self.assertRaises(KeyError): + obj_dict["f"] + + def test_setitem(self): + obj_list = self.lazy_wrap([1, 2, 3]) + obj_dict = self.lazy_wrap({"a": 1, "b": 2, "c": 3}) + + obj_list[0] = 100 + self.assertEqual(obj_list, [100, 2, 3]) + obj_list[1:2] = [200, 300, 400] + self.assertEqual(obj_list, [100, 200, 300, 400, 3]) + + obj_dict["a"] = 100 + obj_dict["d"] = 400 + self.assertEqual(obj_dict, {"a": 100, "b": 2, "c": 3, "d": 400}) + + def test_delitem(self): + obj_list = self.lazy_wrap([1, 2, 3]) + obj_dict = self.lazy_wrap({"a": 1, "b": 2, "c": 3}) + + del obj_list[-1] + del 
obj_dict["c"] + self.assertEqual(obj_list, [1, 2]) + self.assertEqual(obj_dict, {"a": 1, "b": 2}) + + with self.assertRaises(IndexError): + del obj_list[3] + + with self.assertRaises(KeyError): + del obj_dict["f"] + + def test_iter(self): + # Tests whether an object's custom `__iter__` method is being + # used when iterating over it. + + class IterObject: + def __init__(self, values): + self.values = values + + def __iter__(self): + return iter(self.values) + + original_list = ["test", "123"] + self.assertEqual(list(self.lazy_wrap(IterObject(original_list))), original_list) + + def test_pickle(self): + # See ticket #16563 + obj = self.lazy_wrap(Foo()) + obj.bar = "baz" + pickled = pickle.dumps(obj) + unpickled = pickle.loads(pickled) + self.assertIsInstance(unpickled, Foo) + self.assertEqual(unpickled, obj) + self.assertEqual(unpickled.foo, obj.foo) + self.assertEqual(unpickled.bar, obj.bar) + + # Test copying lazy objects wrapping both builtin types and user-defined + # classes since a lot of the relevant code does __dict__ manipulation and + # builtin types don't have __dict__. + + def test_copy_list(self): + # Copying a list works and returns the correct objects. + lst = [1, 2, 3] + + obj = self.lazy_wrap(lst) + len(lst) # forces evaluation + obj2 = copy.copy(obj) + + self.assertIsNot(obj, obj2) + self.assertIsInstance(obj2, list) + self.assertEqual(obj2, [1, 2, 3]) + + def test_copy_list_no_evaluation(self): + # Copying a list doesn't force evaluation. + lst = [1, 2, 3] + + obj = self.lazy_wrap(lst) + obj2 = copy.copy(obj) + + self.assertIsNot(obj, obj2) + self.assertIs(obj._wrapped, empty) + self.assertIs(obj2._wrapped, empty) + + def test_copy_class(self): + # Copying a class works and returns the correct objects. 
+ foo = Foo() + + obj = self.lazy_wrap(foo) + str(foo) # forces evaluation + obj2 = copy.copy(obj) + + self.assertIsNot(obj, obj2) + self.assertIsInstance(obj2, Foo) + self.assertEqual(obj2, Foo()) + + def test_copy_class_no_evaluation(self): + # Copying a class doesn't force evaluation. + foo = Foo() + + obj = self.lazy_wrap(foo) + obj2 = copy.copy(obj) + + self.assertIsNot(obj, obj2) + self.assertIs(obj._wrapped, empty) + self.assertIs(obj2._wrapped, empty) + + def test_deepcopy_list(self): + # Deep copying a list works and returns the correct objects. + lst = [1, 2, 3] + + obj = self.lazy_wrap(lst) + len(lst) # forces evaluation + obj2 = copy.deepcopy(obj) + + self.assertIsNot(obj, obj2) + self.assertIsInstance(obj2, list) + self.assertEqual(obj2, [1, 2, 3]) + + def test_deepcopy_list_no_evaluation(self): + # Deep copying doesn't force evaluation. + lst = [1, 2, 3] + + obj = self.lazy_wrap(lst) + obj2 = copy.deepcopy(obj) + + self.assertIsNot(obj, obj2) + self.assertIs(obj._wrapped, empty) + self.assertIs(obj2._wrapped, empty) + + def test_deepcopy_class(self): + # Deep copying a class works and returns the correct objects. + foo = Foo() + + obj = self.lazy_wrap(foo) + str(foo) # forces evaluation + obj2 = copy.deepcopy(obj) + + self.assertIsNot(obj, obj2) + self.assertIsInstance(obj2, Foo) + self.assertEqual(obj2, Foo()) + + def test_deepcopy_class_no_evaluation(self): + # Deep copying doesn't force evaluation. + foo = Foo() + + obj = self.lazy_wrap(foo) + obj2 = copy.deepcopy(obj) + + self.assertIsNot(obj, obj2) + self.assertIs(obj._wrapped, empty) + self.assertIs(obj2._wrapped, empty) + + +class SimpleLazyObjectTestCase(LazyObjectTestCase): + # By inheriting from LazyObjectTestCase and redefining the lazy_wrap() + # method which all testcases use, we get to make sure all behaviors + # tested in the parent testcase also apply to SimpleLazyObject. 
+ def lazy_wrap(self, wrapped_object): + return SimpleLazyObject(lambda: wrapped_object) + + def test_repr(self): + # First, for an unevaluated SimpleLazyObject + obj = self.lazy_wrap(42) + # __repr__ contains __repr__ of setup function and does not evaluate + # the SimpleLazyObject + self.assertRegex(repr(obj), "^<SimpleLazyObject: <function.*") + + def test_add(self): + obj1 = self.lazy_wrap(1) + self.assertEqual(obj1 + 1, 2) + obj2 = self.lazy_wrap(2) + self.assertEqual(obj2 + obj1, 3) + self.assertEqual(obj1 + obj2, 3) + + def test_radd(self): + obj1 = self.lazy_wrap(1) + self.assertEqual(1 + obj1, 2) + + def test_trace(self): + # See ticket #19456 + old_trace_func = sys.gettrace() + try: + + def trace_func(frame, event, arg): + frame.f_locals["self"].__class__ + if old_trace_func is not None: + old_trace_func(frame, event, arg) + + sys.settrace(trace_func) + self.lazy_wrap(None) + finally: + sys.settrace(old_trace_func) + + def test_none(self): + i = [0] + + def f(): + i[0] += 1 + return None + + x = SimpleLazyObject(f) + self.assertEqual(str(x), "None") + self.assertEqual(i, [1]) + self.assertEqual(str(x), "None") + self.assertEqual(i, [1]) + + def test_dict(self): + # See ticket #18447 + lazydict = SimpleLazyObject(lambda: {"one": 1}) + self.assertEqual(lazydict["one"], 1) + lazydict["one"] = -1 + self.assertEqual(lazydict["one"], -1) + self.assertIn("one", lazydict) + self.assertNotIn("two", lazydict) + self.assertEqual(len(lazydict), 1) + del lazydict["one"] + with self.assertRaises(KeyError): + lazydict["one"] + + def test_list_set(self): + lazy_list = SimpleLazyObject(lambda: [1, 2, 3, 4, 5]) + lazy_set = SimpleLazyObject(lambda: {1, 2, 3, 4}) + self.assertIn(1, lazy_list) + self.assertIn(1, lazy_set) + self.assertNotIn(6, lazy_list) + self.assertNotIn(6, lazy_set) + self.assertEqual(len(lazy_list), 5) + self.assertEqual(len(lazy_set), 4) + + +class BaseBaz: + """ + A base class with a funky __reduce__ method, meant to simulate the + __reduce__ method of Model, which sets
self._django_version. + """ + + def __init__(self): + self.baz = "wrong" + + def __reduce__(self): + self.baz = "right" + return super().__reduce__() + + def __eq__(self, other): + if self.__class__ != other.__class__: + return False + for attr in ["bar", "baz", "quux"]: + if hasattr(self, attr) != hasattr(other, attr): + return False + elif getattr(self, attr, None) != getattr(other, attr, None): + return False + return True + + +class Baz(BaseBaz): + """ + A class that inherits from BaseBaz and has its own __reduce_ex__ method. + """ + + def __init__(self, bar): + self.bar = bar + super().__init__() + + def __reduce_ex__(self, proto): + self.quux = "quux" + return super().__reduce_ex__(proto) + + +class BazProxy(Baz): + """ + A class that acts as a proxy for Baz. It does some scary mucking about with + dicts, which simulates some crazy things that people might do with + e.g. proxy models. + """ + + def __init__(self, baz): + self.__dict__ = baz.__dict__ + self._baz = baz + # Grandparent super + super(BaseBaz, self).__init__() + + +class SimpleLazyObjectPickleTestCase(TestCase): + """ + Regression test for pickling a SimpleLazyObject wrapping a model (#25389). + Also covers other classes with a custom __reduce__ method. + """ + + def test_pickle_with_reduce(self): + """ + Test in a fairly synthetic setting. + """ + # Test every pickle protocol available + for protocol in range(pickle.HIGHEST_PROTOCOL + 1): + lazy_objs = [ + SimpleLazyObject(lambda: BaseBaz()), + SimpleLazyObject(lambda: Baz(1)), + SimpleLazyObject(lambda: BazProxy(Baz(2))), + ] + for obj in lazy_objs: + pickled = pickle.dumps(obj, protocol) + unpickled = pickle.loads(pickled) + self.assertEqual(unpickled, obj) + self.assertEqual(unpickled.baz, "right") + + def test_pickle_model(self): + """ + Test on an actual model, based on the report in #25426. 
+ """ + category = Category.objects.create(name="thing1") + CategoryInfo.objects.create(category=category) + # Test every pickle protocol available + for protocol in range(pickle.HIGHEST_PROTOCOL + 1): + lazy_category = SimpleLazyObject(lambda: category) + # Test both if we accessed a field on the model and if we didn't. + lazy_category.categoryinfo + lazy_category_2 = SimpleLazyObject(lambda: category) + with warnings.catch_warnings(record=True) as recorded: + self.assertEqual( + pickle.loads(pickle.dumps(lazy_category, protocol)), category + ) + self.assertEqual( + pickle.loads(pickle.dumps(lazy_category_2, protocol)), category + ) + # Assert that there were no warnings. + self.assertEqual(len(recorded), 0) diff --git a/testbed/django__django/tests/utils_tests/test_lorem_ipsum.py b/testbed/django__django/tests/utils_tests/test_lorem_ipsum.py new file mode 100644 index 0000000000000000000000000000000000000000..deda09c7176bc608ea218a53c48124f8c971f16c --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_lorem_ipsum.py @@ -0,0 +1,127 @@ +import unittest +from unittest import mock + +from django.utils.lorem_ipsum import paragraph, paragraphs, sentence, words + + +class LoremIpsumTests(unittest.TestCase): + def test_negative_words(self): + """words(n) returns n + 19 words, even if n is negative.""" + self.assertEqual( + words(-5), + "lorem ipsum dolor sit amet consectetur adipisicing elit sed do " + "eiusmod tempor incididunt ut", + ) + + def test_same_or_less_common_words(self): + """words(n) for n < 19.""" + self.assertEqual(words(7), "lorem ipsum dolor sit amet consectetur adipisicing") + + def test_common_words_in_string(self): + """words(n) starts with the 19 standard lorem ipsum words for n > 19.""" + self.assertTrue( + words(25).startswith( + "lorem ipsum dolor sit amet consectetur adipisicing elit sed " + "do eiusmod tempor incididunt ut labore et dolore magna aliqua" + ) + ) + + def test_more_words_than_common(self): + """words(n) returns n words 
for n > 19.""" + self.assertEqual(len(words(25).split()), 25) + + def test_common_large_number_of_words(self): + """words(n) has n words when n is greater than len(WORDS).""" + self.assertEqual(len(words(500).split()), 500) + + @mock.patch("django.utils.lorem_ipsum.random.sample") + def test_not_common_words(self, mock_sample): + """words(n, common=False) returns random words.""" + mock_sample.return_value = ["exercitationem", "perferendis"] + self.assertEqual(words(2, common=False), "exercitationem perferendis") + + def test_sentence_starts_with_capital(self): + """A sentence starts with a capital letter.""" + self.assertTrue(sentence()[0].isupper()) + + @mock.patch("django.utils.lorem_ipsum.random.sample") + @mock.patch("django.utils.lorem_ipsum.random.choice") + @mock.patch("django.utils.lorem_ipsum.random.randint") + def test_sentence(self, mock_randint, mock_choice, mock_sample): + """ + Sentences are built using some number of phrases and a set of words. + """ + mock_randint.return_value = 2 # Use two phrases. + mock_sample.return_value = ["exercitationem", "perferendis"] + mock_choice.return_value = "?" + value = sentence() + self.assertEqual(mock_randint.call_count, 3) + self.assertEqual(mock_sample.call_count, 2) + self.assertEqual(mock_choice.call_count, 1) + self.assertEqual( + value, "Exercitationem perferendis, exercitationem perferendis?" + ) + + @mock.patch("django.utils.lorem_ipsum.random.choice") + def test_sentence_ending(self, mock_choice): + """Sentences end with a question mark or a period.""" + mock_choice.return_value = "?" + self.assertIn(sentence()[-1], "?") + mock_choice.return_value = "." 
+ self.assertIn(sentence()[-1], ".") + + @mock.patch("django.utils.lorem_ipsum.random.sample") + @mock.patch("django.utils.lorem_ipsum.random.choice") + @mock.patch("django.utils.lorem_ipsum.random.randint") + def test_paragraph(self, mock_paragraph_randint, mock_choice, mock_sample): + """paragraph() generates a single paragraph.""" + # Make creating 2 sentences use 2 phrases. + mock_paragraph_randint.return_value = 2 + mock_sample.return_value = ["exercitationem", "perferendis"] + mock_choice.return_value = "." + value = paragraph() + self.assertEqual(mock_paragraph_randint.call_count, 7) + self.assertEqual( + value, + ( + "Exercitationem perferendis, exercitationem perferendis. " + "Exercitationem perferendis, exercitationem perferendis." + ), + ) + + @mock.patch("django.utils.lorem_ipsum.random.sample") + @mock.patch("django.utils.lorem_ipsum.random.choice") + @mock.patch("django.utils.lorem_ipsum.random.randint") + def test_paragraphs_not_common(self, mock_randint, mock_choice, mock_sample): + """ + paragraphs(1, common=False) generating one paragraph that's not the + COMMON_P paragraph. + """ + # Make creating 2 sentences use 2 phrases. + mock_randint.return_value = 2 + mock_sample.return_value = ["exercitationem", "perferendis"] + mock_choice.return_value = "." + self.assertEqual( + paragraphs(1, common=False), + [ + "Exercitationem perferendis, exercitationem perferendis. " + "Exercitationem perferendis, exercitationem perferendis." + ], + ) + self.assertEqual(mock_randint.call_count, 7) + + def test_paragraphs(self): + """paragraphs(1) uses the COMMON_P paragraph.""" + self.assertEqual( + paragraphs(1), + [ + "Lorem ipsum dolor sit amet, consectetur adipisicing elit, " + "sed do eiusmod tempor incididunt ut labore et dolore magna " + "aliqua. Ut enim ad minim veniam, quis nostrud exercitation " + "ullamco laboris nisi ut aliquip ex ea commodo consequat. 
" + "Duis aute irure dolor in reprehenderit in voluptate velit " + "esse cillum dolore eu fugiat nulla pariatur. Excepteur sint " + "occaecat cupidatat non proident, sunt in culpa qui officia " + "deserunt mollit anim id est laborum." + ], + ) diff --git a/testbed/django__django/tests/utils_tests/test_module/__init__.py b/testbed/django__django/tests/utils_tests/test_module/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d8a5fe2ed9f5921b1a7f92f1cf6c40ffc3a454b7 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_module/__init__.py @@ -0,0 +1,5 @@ +class SiteMock: + _registry = {} + + +site = SiteMock() diff --git a/testbed/django__django/tests/utils_tests/test_module/__main__.py b/testbed/django__django/tests/utils_tests/test_module/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/utils_tests/test_module/another_bad_module.py b/testbed/django__django/tests/utils_tests/test_module/another_bad_module.py new file mode 100644 index 0000000000000000000000000000000000000000..769bb2b246f69b3899a041ffb4a938b6162e9908 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_module/another_bad_module.py @@ -0,0 +1,11 @@ +from . import site + +content = "Another Bad Module" + +site._registry.update( + { + "foo": "bar", + } +) + +raise Exception("Some random exception.") diff --git a/testbed/django__django/tests/utils_tests/test_module/another_good_module.py b/testbed/django__django/tests/utils_tests/test_module/another_good_module.py new file mode 100644 index 0000000000000000000000000000000000000000..7ab8224f5f08a5fe48333acc969f3e83d6ea7f26 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_module/another_good_module.py @@ -0,0 +1,9 @@ +from . 
import site + +content = "Another Good Module" + +site._registry.update( + { + "lorem": "ipsum", + } +) diff --git a/testbed/django__django/tests/utils_tests/test_module/bad_module.py b/testbed/django__django/tests/utils_tests/test_module/bad_module.py new file mode 100644 index 0000000000000000000000000000000000000000..728799ef2678313d767a575a0276596bff1ad230 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_module/bad_module.py @@ -0,0 +1,3 @@ +import a_package_name_that_does_not_exist # NOQA + +content = "Bad Module" diff --git a/testbed/django__django/tests/utils_tests/test_module/child_module/__init__.py b/testbed/django__django/tests/utils_tests/test_module/child_module/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/utils_tests/test_module/child_module/grandchild_module.py b/testbed/django__django/tests/utils_tests/test_module/child_module/grandchild_module.py new file mode 100644 index 0000000000000000000000000000000000000000..09bd1c4b7fd19507f2e974f5f0a3b23c27dade1c --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_module/child_module/grandchild_module.py @@ -0,0 +1 @@ +content = "Grandchild Module" diff --git a/testbed/django__django/tests/utils_tests/test_module/good_module.py b/testbed/django__django/tests/utils_tests/test_module/good_module.py new file mode 100644 index 0000000000000000000000000000000000000000..90a0388554eb5c6f46414b600ac643ad567aa3cc --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_module/good_module.py @@ -0,0 +1 @@ +content = "Good Module" diff --git a/testbed/django__django/tests/utils_tests/test_module/main_module.py b/testbed/django__django/tests/utils_tests/test_module/main_module.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/utils_tests/test_module_loading.py 
b/testbed/django__django/tests/utils_tests/test_module_loading.py new file mode 100644 index 0000000000000000000000000000000000000000..80ada3abd7889e426876a86b804fe7d8064f90c1 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_module_loading.py @@ -0,0 +1,228 @@ +import os +import sys +import unittest +from importlib import import_module +from zipimport import zipimporter + +from django.test import SimpleTestCase, modify_settings +from django.test.utils import extend_sys_path +from django.utils.module_loading import ( + autodiscover_modules, + import_string, + module_has_submodule, +) + + +class DefaultLoader(unittest.TestCase): + def test_loader(self): + "Normal module existence can be tested" + test_module = import_module("utils_tests.test_module") + test_no_submodule = import_module("utils_tests.test_no_submodule") + + # An importable child + self.assertTrue(module_has_submodule(test_module, "good_module")) + mod = import_module("utils_tests.test_module.good_module") + self.assertEqual(mod.content, "Good Module") + + # A child that exists, but will generate an import error if loaded + self.assertTrue(module_has_submodule(test_module, "bad_module")) + with self.assertRaises(ImportError): + import_module("utils_tests.test_module.bad_module") + + # A child that doesn't exist + self.assertFalse(module_has_submodule(test_module, "no_such_module")) + with self.assertRaises(ImportError): + import_module("utils_tests.test_module.no_such_module") + + # A child that doesn't exist, but is the name of a package on the path + self.assertFalse(module_has_submodule(test_module, "django")) + with self.assertRaises(ImportError): + import_module("utils_tests.test_module.django") + + # Don't be confused by caching of import misses + import types # NOQA: causes attempted import of utils_tests.types + + self.assertFalse(module_has_submodule(sys.modules["utils_tests"], "types")) + + # A module which doesn't have a __path__ (so no submodules) + 
self.assertFalse(module_has_submodule(test_no_submodule, "anything")) + with self.assertRaises(ImportError): + import_module("utils_tests.test_no_submodule.anything") + + def test_has_sumbodule_with_dotted_path(self): + """Nested module existence can be tested.""" + test_module = import_module("utils_tests.test_module") + # A grandchild that exists. + self.assertIs( + module_has_submodule(test_module, "child_module.grandchild_module"), True + ) + # A grandchild that doesn't exist. + self.assertIs( + module_has_submodule(test_module, "child_module.no_such_module"), False + ) + # A grandchild whose parent doesn't exist. + self.assertIs( + module_has_submodule(test_module, "no_such_module.grandchild_module"), False + ) + # A grandchild whose parent is not a package. + self.assertIs( + module_has_submodule(test_module, "good_module.no_such_module"), False + ) + + +class EggLoader(unittest.TestCase): + def setUp(self): + self.egg_dir = "%s/eggs" % os.path.dirname(__file__) + + def tearDown(self): + sys.path_importer_cache.clear() + + sys.modules.pop("egg_module.sub1.sub2.bad_module", None) + sys.modules.pop("egg_module.sub1.sub2.good_module", None) + sys.modules.pop("egg_module.sub1.sub2", None) + sys.modules.pop("egg_module.sub1", None) + sys.modules.pop("egg_module.bad_module", None) + sys.modules.pop("egg_module.good_module", None) + sys.modules.pop("egg_module", None) + + def test_shallow_loader(self): + "Module existence can be tested inside eggs" + egg_name = "%s/test_egg.egg" % self.egg_dir + with extend_sys_path(egg_name): + egg_module = import_module("egg_module") + + # An importable child + self.assertTrue(module_has_submodule(egg_module, "good_module")) + mod = import_module("egg_module.good_module") + self.assertEqual(mod.content, "Good Module") + + # A child that exists, but will generate an import error if loaded + self.assertTrue(module_has_submodule(egg_module, "bad_module")) + with self.assertRaises(ImportError): + import_module("egg_module.bad_module") 
+ + # A child that doesn't exist + self.assertFalse(module_has_submodule(egg_module, "no_such_module")) + with self.assertRaises(ImportError): + import_module("egg_module.no_such_module") + + def test_deep_loader(self): + "Modules deep inside an egg can still be tested for existence" + egg_name = "%s/test_egg.egg" % self.egg_dir + with extend_sys_path(egg_name): + egg_module = import_module("egg_module.sub1.sub2") + + # An importable child + self.assertTrue(module_has_submodule(egg_module, "good_module")) + mod = import_module("egg_module.sub1.sub2.good_module") + self.assertEqual(mod.content, "Deep Good Module") + + # A child that exists, but will generate an import error if loaded + self.assertTrue(module_has_submodule(egg_module, "bad_module")) + with self.assertRaises(ImportError): + import_module("egg_module.sub1.sub2.bad_module") + + # A child that doesn't exist + self.assertFalse(module_has_submodule(egg_module, "no_such_module")) + with self.assertRaises(ImportError): + import_module("egg_module.sub1.sub2.no_such_module") + + +class ModuleImportTests(SimpleTestCase): + def test_import_string(self): + cls = import_string("django.utils.module_loading.import_string") + self.assertEqual(cls, import_string) + + # Test exceptions raised + with self.assertRaises(ImportError): + import_string("no_dots_in_path") + msg = 'Module "utils_tests" does not define a "unexistent" attribute' + with self.assertRaisesMessage(ImportError, msg): + import_string("utils_tests.unexistent") + + +@modify_settings(INSTALLED_APPS={"append": "utils_tests.test_module"}) +class AutodiscoverModulesTestCase(SimpleTestCase): + def tearDown(self): + sys.path_importer_cache.clear() + + sys.modules.pop("utils_tests.test_module.another_bad_module", None) + sys.modules.pop("utils_tests.test_module.another_good_module", None) + sys.modules.pop("utils_tests.test_module.bad_module", None) + sys.modules.pop("utils_tests.test_module.good_module", None) + sys.modules.pop("utils_tests.test_module", 
None) + + def test_autodiscover_modules_found(self): + autodiscover_modules("good_module") + + def test_autodiscover_modules_not_found(self): + autodiscover_modules("missing_module") + + def test_autodiscover_modules_found_but_bad_module(self): + with self.assertRaisesMessage( + ImportError, "No module named 'a_package_name_that_does_not_exist'" + ): + autodiscover_modules("bad_module") + + def test_autodiscover_modules_several_one_bad_module(self): + with self.assertRaisesMessage( + ImportError, "No module named 'a_package_name_that_does_not_exist'" + ): + autodiscover_modules("good_module", "bad_module") + + def test_autodiscover_modules_several_found(self): + autodiscover_modules("good_module", "another_good_module") + + def test_autodiscover_modules_several_found_with_registry(self): + from .test_module import site + + autodiscover_modules("good_module", "another_good_module", register_to=site) + self.assertEqual(site._registry, {"lorem": "ipsum"}) + + def test_validate_registry_keeps_intact(self): + from .test_module import site + + with self.assertRaisesMessage(Exception, "Some random exception."): + autodiscover_modules("another_bad_module", register_to=site) + self.assertEqual(site._registry, {}) + + def test_validate_registry_resets_after_erroneous_module(self): + from .test_module import site + + with self.assertRaisesMessage(Exception, "Some random exception."): + autodiscover_modules( + "another_good_module", "another_bad_module", register_to=site + ) + self.assertEqual(site._registry, {"lorem": "ipsum"}) + + def test_validate_registry_resets_after_missing_module(self): + from .test_module import site + + autodiscover_modules( + "does_not_exist", "another_good_module", "does_not_exist2", register_to=site + ) + self.assertEqual(site._registry, {"lorem": "ipsum"}) + + +class TestFinder: + def __init__(self, *args, **kwargs): + self.importer = zipimporter(*args, **kwargs) + + def find_spec(self, path, target=None): + return self.importer.find_spec(path, 
target) + + +class CustomLoader(EggLoader): + """The Custom Loader test is exactly the same as the EggLoader, but + it uses a custom defined Loader class. Although the EggLoader combines both + functions into one class, this isn't required. + """ + + def setUp(self): + super().setUp() + sys.path_hooks.insert(0, TestFinder) + sys.path_importer_cache.clear() + + def tearDown(self): + super().tearDown() + sys.path_hooks.pop(0) diff --git a/testbed/django__django/tests/utils_tests/test_no_submodule.py b/testbed/django__django/tests/utils_tests/test_no_submodule.py new file mode 100644 index 0000000000000000000000000000000000000000..3c424ac788d30c5c7452560c4e54363d608265ed --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_no_submodule.py @@ -0,0 +1 @@ +# Used to test for modules which don't have submodules. diff --git a/testbed/django__django/tests/utils_tests/test_numberformat.py b/testbed/django__django/tests/utils_tests/test_numberformat.py new file mode 100644 index 0000000000000000000000000000000000000000..3c7620c74442b6359c63bf1be75904d0b8493368 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_numberformat.py @@ -0,0 +1,178 @@ +from decimal import Decimal +from sys import float_info + +from django.test import SimpleTestCase +from django.utils.numberformat import format as nformat + + +class TestNumberFormat(SimpleTestCase): + def test_format_number(self): + self.assertEqual(nformat(1234, "."), "1234") + self.assertEqual(nformat(1234.2, "."), "1234.2") + self.assertEqual(nformat(1234, ".", decimal_pos=2), "1234.00") + self.assertEqual(nformat(1234, ".", grouping=2, thousand_sep=","), "1234") + self.assertEqual( + nformat(1234, ".", grouping=2, thousand_sep=",", force_grouping=True), + "12,34", + ) + self.assertEqual(nformat(-1234.33, ".", decimal_pos=1), "-1234.3") + # The use_l10n parameter can force thousand grouping behavior. 
+ with self.settings(USE_THOUSAND_SEPARATOR=True): + self.assertEqual( + nformat(1234, ".", grouping=3, thousand_sep=",", use_l10n=False), "1234" + ) + self.assertEqual( + nformat(1234, ".", grouping=3, thousand_sep=",", use_l10n=True), "1,234" + ) + + def test_format_string(self): + self.assertEqual(nformat("1234", "."), "1234") + self.assertEqual(nformat("1234.2", "."), "1234.2") + self.assertEqual(nformat("1234", ".", decimal_pos=2), "1234.00") + self.assertEqual(nformat("1234", ".", grouping=2, thousand_sep=","), "1234") + self.assertEqual( + nformat("1234", ".", grouping=2, thousand_sep=",", force_grouping=True), + "12,34", + ) + self.assertEqual(nformat("-1234.33", ".", decimal_pos=1), "-1234.3") + self.assertEqual( + nformat( + "10000", ".", grouping=3, thousand_sep="comma", force_grouping=True + ), + "10comma000", + ) + + def test_large_number(self): + most_max = ( + "{}179769313486231570814527423731704356798070567525844996" + "598917476803157260780028538760589558632766878171540458953" + "514382464234321326889464182768467546703537516986049910576" + "551282076245490090389328944075868508455133942304583236903" + "222948165808559332123348274797826204144723168738177180919" + "29988125040402618412485836{}" + ) + most_max2 = ( + "{}35953862697246314162905484746340871359614113505168999" + "31978349536063145215600570775211791172655337563430809179" + "07028764928468642653778928365536935093407075033972099821" + "15310256415249098018077865788815173701691026788460916647" + "38064458963316171186642466965495956524082894463374763543" + "61838599762500808052368249716736" + ) + int_max = int(float_info.max) + self.assertEqual(nformat(int_max, "."), most_max.format("", "8")) + self.assertEqual(nformat(int_max + 1, "."), most_max.format("", "9")) + self.assertEqual(nformat(int_max * 2, "."), most_max2.format("")) + self.assertEqual(nformat(0 - int_max, "."), most_max.format("-", "8")) + self.assertEqual(nformat(-1 - int_max, "."), most_max.format("-", "9")) + 
self.assertEqual(nformat(-2 * int_max, "."), most_max2.format("-")) + + def test_float_numbers(self): + tests = [ + (9e-10, 10, "0.0000000009"), + (9e-19, 2, "0.00"), + (0.00000000000099, 0, "0"), + (0.00000000000099, 13, "0.0000000000009"), + (1e16, None, "10000000000000000"), + (1e16, 2, "10000000000000000.00"), + # A float without a fractional part (3.) results in a ".0" when no + # decimal_pos is given. Contrast that with the Decimal('3.') case + # in test_decimal_numbers which doesn't return a fractional part. + (3.0, None, "3.0"), + ] + for value, decimal_pos, expected_value in tests: + with self.subTest(value=value, decimal_pos=decimal_pos): + self.assertEqual(nformat(value, ".", decimal_pos), expected_value) + # Thousand grouping behavior. + self.assertEqual( + nformat(1e16, ".", thousand_sep=",", grouping=3, force_grouping=True), + "10,000,000,000,000,000", + ) + self.assertEqual( + nformat( + 1e16, + ".", + decimal_pos=2, + thousand_sep=",", + grouping=3, + force_grouping=True, + ), + "10,000,000,000,000,000.00", + ) + + def test_decimal_numbers(self): + self.assertEqual(nformat(Decimal("1234"), "."), "1234") + self.assertEqual(nformat(Decimal("1234.2"), "."), "1234.2") + self.assertEqual(nformat(Decimal("1234"), ".", decimal_pos=2), "1234.00") + self.assertEqual( + nformat(Decimal("1234"), ".", grouping=2, thousand_sep=","), "1234" + ) + self.assertEqual( + nformat( + Decimal("1234"), ".", grouping=2, thousand_sep=",", force_grouping=True + ), + "12,34", + ) + self.assertEqual(nformat(Decimal("-1234.33"), ".", decimal_pos=1), "-1234.3") + self.assertEqual( + nformat(Decimal("0.00000001"), ".", decimal_pos=8), "0.00000001" + ) + self.assertEqual(nformat(Decimal("9e-19"), ".", decimal_pos=2), "0.00") + self.assertEqual(nformat(Decimal(".00000000000099"), ".", decimal_pos=0), "0") + self.assertEqual( + nformat( + Decimal("1e16"), ".", thousand_sep=",", grouping=3, force_grouping=True + ), + "10,000,000,000,000,000", + ) + self.assertEqual( + nformat( + 
Decimal("1e16"), + ".", + decimal_pos=2, + thousand_sep=",", + grouping=3, + force_grouping=True, + ), + "10,000,000,000,000,000.00", + ) + self.assertEqual(nformat(Decimal("3."), "."), "3") + self.assertEqual(nformat(Decimal("3.0"), "."), "3.0") + # Very large & small numbers. + tests = [ + ("9e9999", None, "9e+9999"), + ("9e9999", 3, "9.000e+9999"), + ("9e201", None, "9e+201"), + ("9e200", None, "9e+200"), + ("1.2345e999", 2, "1.23e+999"), + ("9e-999", None, "9e-999"), + ("1e-7", 8, "0.00000010"), + ("1e-8", 8, "0.00000001"), + ("1e-9", 8, "0.00000000"), + ("1e-10", 8, "0.00000000"), + ("1e-11", 8, "0.00000000"), + ("1" + ("0" * 300), 3, "1.000e+300"), + ("0.{}1234".format("0" * 299), 3, "0.000"), + ] + for value, decimal_pos, expected_value in tests: + with self.subTest(value=value): + self.assertEqual( + nformat(Decimal(value), ".", decimal_pos), expected_value + ) + + def test_decimal_subclass(self): + class EuroDecimal(Decimal): + """ + Wrapper for Decimal which prefixes each amount with the € symbol. 
+ """ + + def __format__(self, specifier, **kwargs): + amount = super().__format__(specifier, **kwargs) + return "€ {}".format(amount) + + price = EuroDecimal("1.23") + self.assertEqual(nformat(price, ","), "€ 1,23") + + def test_empty(self): + self.assertEqual(nformat("", "."), "") + self.assertEqual(nformat(None, "."), "None") diff --git a/testbed/django__django/tests/utils_tests/test_os_utils.py b/testbed/django__django/tests/utils_tests/test_os_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7204167688dd253f79d192fb5780703fbc99d0ae --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_os_utils.py @@ -0,0 +1,40 @@ +import os +import unittest +from pathlib import Path + +from django.core.exceptions import SuspiciousFileOperation +from django.utils._os import safe_join, to_path + + +class SafeJoinTests(unittest.TestCase): + def test_base_path_ends_with_sep(self): + drive, path = os.path.splitdrive(safe_join("/abc/", "abc")) + self.assertEqual(path, "{0}abc{0}abc".format(os.path.sep)) + + def test_root_path(self): + drive, path = os.path.splitdrive(safe_join("/", "path")) + self.assertEqual( + path, + "{}path".format(os.path.sep), + ) + + drive, path = os.path.splitdrive(safe_join("/", "")) + self.assertEqual( + path, + os.path.sep, + ) + + def test_parent_path(self): + with self.assertRaises(SuspiciousFileOperation): + safe_join("/abc/", "../def") + + +class ToPathTests(unittest.TestCase): + def test_to_path(self): + for path in ("/tmp/some_file.txt", Path("/tmp/some_file.txt")): + with self.subTest(path): + self.assertEqual(to_path(path), Path("/tmp/some_file.txt")) + + def test_to_path_invalid_value(self): + with self.assertRaises(TypeError): + to_path(42) diff --git a/testbed/django__django/tests/utils_tests/test_regex_helper.py b/testbed/django__django/tests/utils_tests/test_regex_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..ca918e5be75c1bf7be0b056303053ec691c532ff --- /dev/null +++ 
b/testbed/django__django/tests/utils_tests/test_regex_helper.py @@ -0,0 +1,57 @@ +import re +import unittest + +from django.test import SimpleTestCase +from django.utils import regex_helper + + +class NormalizeTests(unittest.TestCase): + def test_empty(self): + pattern = r"" + expected = [("", [])] + result = regex_helper.normalize(pattern) + self.assertEqual(result, expected) + + def test_escape(self): + pattern = r"\\\^\$\.\|\?\*\+\(\)\[" + expected = [("\\^$.|?*+()[", [])] + result = regex_helper.normalize(pattern) + self.assertEqual(result, expected) + + def test_group_positional(self): + pattern = r"(.*)-(.+)" + expected = [("%(_0)s-%(_1)s", ["_0", "_1"])] + result = regex_helper.normalize(pattern) + self.assertEqual(result, expected) + + def test_group_noncapturing(self): + pattern = r"(?:non-capturing)" + expected = [("non-capturing", [])] + result = regex_helper.normalize(pattern) + self.assertEqual(result, expected) + + def test_group_named(self): + pattern = r"(?P<first_group_name>.*)-(?P<second_group_name>.*)" + expected = [ + ( + "%(first_group_name)s-%(second_group_name)s", + ["first_group_name", "second_group_name"], + ) + ] + result = regex_helper.normalize(pattern) + self.assertEqual(result, expected) + + def test_group_backreference(self): + pattern = r"(?P<first_group_name>.*)-(?P=first_group_name)" + expected = [("%(first_group_name)s-%(first_group_name)s", ["first_group_name"])] + result = regex_helper.normalize(pattern) + self.assertEqual(result, expected) + + +class LazyReCompileTests(SimpleTestCase): + def test_flags_with_pre_compiled_regex(self): + test_pattern = re.compile("test") + lazy_test_pattern = regex_helper._lazy_re_compile(test_pattern, re.I) + msg = "flags must be empty if regex is passed pre-compiled" + with self.assertRaisesMessage(AssertionError, msg): + lazy_test_pattern.match("TEST") diff --git a/testbed/django__django/tests/utils_tests/test_safestring.py b/testbed/django__django/tests/utils_tests/test_safestring.py new file mode 100644 index
0000000000000000000000000000000000000000..1a79afbf483fa84cf66cc9b7f2c5f7eb249d0971 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_safestring.py @@ -0,0 +1,123 @@ +from django.template import Context, Template +from django.test import SimpleTestCase +from django.utils import html, translation +from django.utils.functional import Promise, lazy, lazystr +from django.utils.safestring import SafeData, SafeString, mark_safe +from django.utils.translation import gettext_lazy + + +class customescape(str): + def __html__(self): + # Implement specific and wrong escaping in order to be able to detect + # when it runs. + return self.replace("<", "<<").replace(">", ">>") + + +class SafeStringTest(SimpleTestCase): + def assertRenderEqual(self, tpl, expected, **context): + context = Context(context) + tpl = Template(tpl) + self.assertEqual(tpl.render(context), expected) + + def test_mark_safe(self): + s = mark_safe("a&b") + + self.assertRenderEqual("{{ s }}", "a&b", s=s) + self.assertRenderEqual("{{ s|force_escape }}", "a&b", s=s) + + def test_mark_safe_str(self): + """ + Calling str() on a SafeString instance doesn't lose the safe status. 
+ """ + s = mark_safe("a&b") + self.assertIsInstance(str(s), type(s)) + + def test_mark_safe_object_implementing_dunder_html(self): + e = customescape("") + s = mark_safe(e) + self.assertIs(s, e) + + self.assertRenderEqual("{{ s }}", "<>", s=s) + self.assertRenderEqual("{{ s|force_escape }}", "<a&b>", s=s) + + def test_mark_safe_lazy(self): + safe_s = mark_safe(lazystr("a&b")) + + self.assertIsInstance(safe_s, Promise) + self.assertRenderEqual("{{ s }}", "a&b", s=safe_s) + self.assertIsInstance(str(safe_s), SafeData) + + def test_mark_safe_lazy_i18n(self): + s = mark_safe(gettext_lazy("name")) + tpl = Template("{{ s }}") + with translation.override("fr"): + self.assertEqual(tpl.render(Context({"s": s})), "nom") + + def test_mark_safe_object_implementing_dunder_str(self): + class Obj: + def __str__(self): + return "" + + s = mark_safe(Obj()) + + self.assertRenderEqual("{{ s }}", "", s=s) + + def test_mark_safe_result_implements_dunder_html(self): + self.assertEqual(mark_safe("a&b").__html__(), "a&b") + + def test_mark_safe_lazy_result_implements_dunder_html(self): + self.assertEqual(mark_safe(lazystr("a&b")).__html__(), "a&b") + + def test_add_lazy_safe_text_and_safe_text(self): + s = html.escape(lazystr("a")) + s += mark_safe("&b") + self.assertRenderEqual("{{ s }}", "a&b", s=s) + + s = html.escapejs(lazystr("a")) + s += mark_safe("&b") + self.assertRenderEqual("{{ s }}", "a&b", s=s) + + def test_mark_safe_as_decorator(self): + """ + mark_safe used as a decorator leaves the result of a function + unchanged. + """ + + def clean_string_provider(): + return "dummy" + + self.assertEqual(mark_safe(clean_string_provider)(), clean_string_provider()) + + def test_mark_safe_decorator_does_not_affect_dunder_html(self): + """ + mark_safe doesn't affect a callable that has an __html__() method. 
+ """ + + class SafeStringContainer: + def __html__(self): + return "" + + self.assertIs(mark_safe(SafeStringContainer), SafeStringContainer) + + def test_mark_safe_decorator_does_not_affect_promises(self): + """ + mark_safe doesn't affect lazy strings (Promise objects). + """ + + def html_str(): + return "" + + lazy_str = lazy(html_str, str)() + self.assertEqual(mark_safe(lazy_str), html_str()) + + def test_default_additional_attrs(self): + s = SafeString("a&b") + msg = "object has no attribute 'dynamic_attr'" + with self.assertRaisesMessage(AttributeError, msg): + s.dynamic_attr = True + + def test_default_safe_data_additional_attrs(self): + s = SafeData() + msg = "object has no attribute 'dynamic_attr'" + with self.assertRaisesMessage(AttributeError, msg): + s.dynamic_attr = True diff --git a/testbed/django__django/tests/utils_tests/test_simplelazyobject.py b/testbed/django__django/tests/utils_tests/test_simplelazyobject.py new file mode 100644 index 0000000000000000000000000000000000000000..7c9c021afbdf9536ac0f13d4f3b054392d7d091d --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_simplelazyobject.py @@ -0,0 +1,16 @@ +import pickle + +from django.contrib.auth.models import User +from django.test import TestCase +from django.utils.functional import SimpleLazyObject + + +class TestUtilsSimpleLazyObjectDjangoTestCase(TestCase): + def test_pickle(self): + user = User.objects.create_user("johndoe", "john@example.com", "pass") + x = SimpleLazyObject(lambda: user) + pickle.dumps(x) + # Try the variant protocol levels. 
+ pickle.dumps(x, 0) + pickle.dumps(x, 1) + pickle.dumps(x, 2) diff --git a/testbed/django__django/tests/utils_tests/test_termcolors.py b/testbed/django__django/tests/utils_tests/test_termcolors.py new file mode 100644 index 0000000000000000000000000000000000000000..7ee4f9b0b20c5dfc565d1656e853981306ae6d4b --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_termcolors.py @@ -0,0 +1,233 @@ +import unittest + +from django.utils.termcolors import ( + DARK_PALETTE, + DEFAULT_PALETTE, + LIGHT_PALETTE, + NOCOLOR_PALETTE, + PALETTES, + colorize, + parse_color_setting, +) + + +class TermColorTests(unittest.TestCase): + def test_empty_string(self): + self.assertEqual(parse_color_setting(""), PALETTES[DEFAULT_PALETTE]) + + def test_simple_palette(self): + self.assertEqual(parse_color_setting("light"), PALETTES[LIGHT_PALETTE]) + self.assertEqual(parse_color_setting("dark"), PALETTES[DARK_PALETTE]) + self.assertIsNone(parse_color_setting("nocolor")) + + def test_fg(self): + self.assertEqual( + parse_color_setting("error=green"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + + def test_fg_bg(self): + self.assertEqual( + parse_color_setting("error=green/blue"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "bg": "blue"}), + ) + + def test_fg_opts(self): + self.assertEqual( + parse_color_setting("error=green,blink"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "opts": ("blink",)}), + ) + self.assertEqual( + parse_color_setting("error=green,bold,blink"), + dict( + PALETTES[NOCOLOR_PALETTE], + ERROR={"fg": "green", "opts": ("blink", "bold")}, + ), + ) + + def test_fg_bg_opts(self): + self.assertEqual( + parse_color_setting("error=green/blue,blink"), + dict( + PALETTES[NOCOLOR_PALETTE], + ERROR={"fg": "green", "bg": "blue", "opts": ("blink",)}, + ), + ) + self.assertEqual( + parse_color_setting("error=green/blue,bold,blink"), + dict( + PALETTES[NOCOLOR_PALETTE], + ERROR={"fg": "green", "bg": "blue", "opts": ("blink", "bold")}, + 
), + ) + + def test_override_palette(self): + self.assertEqual( + parse_color_setting("light;error=green"), + dict(PALETTES[LIGHT_PALETTE], ERROR={"fg": "green"}), + ) + + def test_override_nocolor(self): + self.assertEqual( + parse_color_setting("nocolor;error=green"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + + def test_reverse_override(self): + self.assertEqual( + parse_color_setting("error=green;light"), PALETTES[LIGHT_PALETTE] + ) + + def test_multiple_roles(self): + self.assertEqual( + parse_color_setting("error=green;sql_field=blue"), + dict( + PALETTES[NOCOLOR_PALETTE], + ERROR={"fg": "green"}, + SQL_FIELD={"fg": "blue"}, + ), + ) + + def test_override_with_multiple_roles(self): + self.assertEqual( + parse_color_setting("light;error=green;sql_field=blue"), + dict( + PALETTES[LIGHT_PALETTE], ERROR={"fg": "green"}, SQL_FIELD={"fg": "blue"} + ), + ) + + def test_empty_definition(self): + self.assertIsNone(parse_color_setting(";")) + self.assertEqual(parse_color_setting("light;"), PALETTES[LIGHT_PALETTE]) + self.assertIsNone(parse_color_setting(";;;")) + + def test_empty_options(self): + self.assertEqual( + parse_color_setting("error=green,"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + self.assertEqual( + parse_color_setting("error=green,,,"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + self.assertEqual( + parse_color_setting("error=green,,blink,,"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "opts": ("blink",)}), + ) + + def test_bad_palette(self): + self.assertIsNone(parse_color_setting("unknown")) + + def test_bad_role(self): + self.assertIsNone(parse_color_setting("unknown=")) + self.assertIsNone(parse_color_setting("unknown=green")) + self.assertEqual( + parse_color_setting("unknown=green;sql_field=blue"), + dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={"fg": "blue"}), + ) + + def test_bad_color(self): + self.assertIsNone(parse_color_setting("error=")) + self.assertEqual( + 
parse_color_setting("error=;sql_field=blue"), + dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={"fg": "blue"}), + ) + self.assertIsNone(parse_color_setting("error=unknown")) + self.assertEqual( + parse_color_setting("error=unknown;sql_field=blue"), + dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={"fg": "blue"}), + ) + self.assertEqual( + parse_color_setting("error=green/unknown"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + self.assertEqual( + parse_color_setting("error=green/blue/something"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "bg": "blue"}), + ) + self.assertEqual( + parse_color_setting("error=green/blue/something,blink"), + dict( + PALETTES[NOCOLOR_PALETTE], + ERROR={"fg": "green", "bg": "blue", "opts": ("blink",)}, + ), + ) + + def test_bad_option(self): + self.assertEqual( + parse_color_setting("error=green,unknown"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + self.assertEqual( + parse_color_setting("error=green,unknown,blink"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "opts": ("blink",)}), + ) + + def test_role_case(self): + self.assertEqual( + parse_color_setting("ERROR=green"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + self.assertEqual( + parse_color_setting("eRrOr=green"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + + def test_color_case(self): + self.assertEqual( + parse_color_setting("error=GREEN"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + self.assertEqual( + parse_color_setting("error=GREEN/BLUE"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "bg": "blue"}), + ) + self.assertEqual( + parse_color_setting("error=gReEn"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green"}), + ) + self.assertEqual( + parse_color_setting("error=gReEn/bLuE"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "bg": "blue"}), + ) + + def test_opts_case(self): + self.assertEqual( + parse_color_setting("error=green,BLINK"), + 
dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "opts": ("blink",)}), + ) + self.assertEqual( + parse_color_setting("error=green,bLiNk"), + dict(PALETTES[NOCOLOR_PALETTE], ERROR={"fg": "green", "opts": ("blink",)}), + ) + + def test_colorize_empty_text(self): + self.assertEqual(colorize(text=None), "\x1b[m\x1b[0m") + self.assertEqual(colorize(text=""), "\x1b[m\x1b[0m") + + self.assertEqual(colorize(text=None, opts=("noreset",)), "\x1b[m") + self.assertEqual(colorize(text="", opts=("noreset",)), "\x1b[m") + + def test_colorize_reset(self): + self.assertEqual(colorize(text="", opts=("reset",)), "\x1b[0m") + + def test_colorize_fg_bg(self): + self.assertEqual(colorize(text="Test", fg="red"), "\x1b[31mTest\x1b[0m") + self.assertEqual(colorize(text="Test", bg="red"), "\x1b[41mTest\x1b[0m") + # Ignored kwarg. + self.assertEqual(colorize(text="Test", other="red"), "\x1b[mTest\x1b[0m") + + def test_colorize_opts(self): + self.assertEqual( + colorize(text="Test", opts=("bold", "underscore")), + "\x1b[1;4mTest\x1b[0m", + ) + self.assertEqual( + colorize(text="Test", opts=("blink",)), + "\x1b[5mTest\x1b[0m", + ) + # Ignored opts. 
+ self.assertEqual( + colorize(text="Test", opts=("not_an_option",)), + "\x1b[mTest\x1b[0m", + ) diff --git a/testbed/django__django/tests/utils_tests/test_text.py b/testbed/django__django/tests/utils_tests/test_text.py new file mode 100644 index 0000000000000000000000000000000000000000..cb2959fe157246aea94d62e4ae07008acc16c0e4 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_text.py @@ -0,0 +1,314 @@ +import json +import sys + +from django.core.exceptions import SuspiciousFileOperation +from django.test import SimpleTestCase +from django.utils import text +from django.utils.functional import lazystr +from django.utils.text import format_lazy +from django.utils.translation import gettext_lazy, override + +IS_WIDE_BUILD = len("\U0001F4A9") == 1 + + +class TestUtilsText(SimpleTestCase): + def test_get_text_list(self): + self.assertEqual(text.get_text_list(["a", "b", "c", "d"]), "a, b, c or d") + self.assertEqual(text.get_text_list(["a", "b", "c"], "and"), "a, b and c") + self.assertEqual(text.get_text_list(["a", "b"], "and"), "a and b") + self.assertEqual(text.get_text_list(["a"]), "a") + self.assertEqual(text.get_text_list([]), "") + with override("ar"): + self.assertEqual(text.get_text_list(["a", "b", "c"]), "a، b أو c") + + def test_smart_split(self): + testdata = [ + ('This is "a person" test.', ["This", "is", '"a person"', "test."]), + ('This is "a person\'s" test.', ["This", "is", '"a person\'s"', "test."]), + ('This is "a person\\"s" test.', ["This", "is", '"a person\\"s"', "test."]), + ("\"a 'one", ['"a', "'one"]), + ("all friends' tests", ["all", "friends'", "tests"]), + ( + 'url search_page words="something else"', + ["url", "search_page", 'words="something else"'], + ), + ( + "url search_page words='something else'", + ["url", "search_page", "words='something else'"], + ), + ( + 'url search_page words "something else"', + ["url", "search_page", "words", '"something else"'], + ), + ( + 'url search_page words-"something else"', + ["url", 
"search_page", 'words-"something else"'], + ), + ("url search_page words=hello", ["url", "search_page", "words=hello"]), + ( + 'url search_page words="something else', + ["url", "search_page", 'words="something', "else"], + ), + ("cut:','|cut:' '", ["cut:','|cut:' '"]), + (lazystr("a b c d"), ["a", "b", "c", "d"]), # Test for #20231 + ] + for test, expected in testdata: + with self.subTest(value=test): + self.assertEqual(list(text.smart_split(test)), expected) + + def test_truncate_chars(self): + truncator = text.Truncator("The quick brown fox jumped over the lazy dog.") + self.assertEqual( + "The quick brown fox jumped over the lazy dog.", truncator.chars(100) + ), + self.assertEqual("The quick brown fox …", truncator.chars(21)), + self.assertEqual("The quick brown fo.....", truncator.chars(23, ".....")), + self.assertEqual(".....", truncator.chars(4, ".....")), + + nfc = text.Truncator("o\xfco\xfco\xfco\xfc") + nfd = text.Truncator("ou\u0308ou\u0308ou\u0308ou\u0308") + self.assertEqual("oüoüoüoü", nfc.chars(8)) + self.assertEqual("oüoüoüoü", nfd.chars(8)) + self.assertEqual("oü…", nfc.chars(3)) + self.assertEqual("oü…", nfd.chars(3)) + + # Ensure the final length is calculated correctly when there are + # combining characters with no precomposed form, and that combining + # characters are not split up. + truncator = text.Truncator("-B\u030AB\u030A----8") + self.assertEqual("-B\u030A…", truncator.chars(3)) + self.assertEqual("-B\u030AB\u030A-…", truncator.chars(5)) + self.assertEqual("-B\u030AB\u030A----8", truncator.chars(8)) + + # Ensure the length of the end text is correctly calculated when it + # contains combining characters with no precomposed form. 
+ truncator = text.Truncator("-----") + self.assertEqual("---B\u030A", truncator.chars(4, "B\u030A")) + self.assertEqual("-----", truncator.chars(5, "B\u030A")) + + # Make a best effort to shorten to the desired length, but requesting + # a length shorter than the ellipsis shouldn't break + self.assertEqual("…", text.Truncator("asdf").chars(0)) + # lazy strings are handled correctly + self.assertEqual( + text.Truncator(lazystr("The quick brown fox")).chars(10), "The quick…" + ) + + def test_truncate_chars_html(self): + perf_test_values = [ + (("", None), + ("&" * 50000, "&" * 9 + "…"), + ("_X<<<<<<<<<<<>", None), + ] + for value, expected in perf_test_values: + with self.subTest(value=value): + truncator = text.Truncator(value) + self.assertEqual( + expected if expected else value, truncator.chars(10, html=True) + ) + + def test_truncate_words(self): + truncator = text.Truncator("The quick brown fox jumped over the lazy dog.") + self.assertEqual( + "The quick brown fox jumped over the lazy dog.", truncator.words(10) + ) + self.assertEqual("The quick brown fox…", truncator.words(4)) + self.assertEqual("The quick brown fox[snip]", truncator.words(4, "[snip]")) + # lazy strings are handled correctly + truncator = text.Truncator( + lazystr("The quick brown fox jumped over the lazy dog.") + ) + self.assertEqual("The quick brown fox…", truncator.words(4)) + + def test_truncate_html_words(self): + truncator = text.Truncator( + '

    The quick brown fox jumped over the lazy dog.' + "

    " + ) + self.assertEqual( + '

    The quick brown fox jumped over the lazy dog.' + "

    ", + truncator.words(10, html=True), + ) + self.assertEqual( + '

    The quick brown fox…

    ', + truncator.words(4, html=True), + ) + self.assertEqual( + '

    The quick brown fox....

    ', + truncator.words(4, "....", html=True), + ) + self.assertEqual( + '

    The quick brown fox

    ', + truncator.words(4, "", html=True), + ) + + # Test with new line inside tag + truncator = text.Truncator( + '

    The quick brown fox jumped over ' + "the lazy dog.

    " + ) + self.assertEqual( + '

    The quick brown…

    ', + truncator.words(3, html=True), + ) + + # Test self-closing tags + truncator = text.Truncator( + "
    The
    quick brown fox jumped over the lazy dog." + ) + self.assertEqual("
    The
    quick brown…", truncator.words(3, html=True)) + truncator = text.Truncator( + "
    The
    quick brown fox jumped over the lazy dog." + ) + self.assertEqual( + "
    The
    quick brown…", truncator.words(3, html=True) + ) + + # Test html entities + truncator = text.Truncator( + "Buenos días! ¿Cómo está?" + ) + self.assertEqual( + "Buenos días! ¿Cómo…", + truncator.words(3, html=True), + ) + truncator = text.Truncator("

    I <3 python, what about you?

    ") + self.assertEqual("

    I <3 python,…

    ", truncator.words(3, html=True)) + + perf_test_values = [ + ("", + "&" * 50000, + "_X<<<<<<<<<<<>", + ] + for value in perf_test_values: + with self.subTest(value=value): + truncator = text.Truncator(value) + self.assertEqual(value, truncator.words(50, html=True)) + + def test_wrap(self): + digits = "1234 67 9" + self.assertEqual(text.wrap(digits, 100), "1234 67 9") + self.assertEqual(text.wrap(digits, 9), "1234 67 9") + self.assertEqual(text.wrap(digits, 8), "1234 67\n9") + + self.assertEqual(text.wrap("short\na long line", 7), "short\na long\nline") + self.assertEqual( + text.wrap("do-not-break-long-words please? ok", 8), + "do-not-break-long-words\nplease?\nok", + ) + + long_word = "l%sng" % ("o" * 20) + self.assertEqual(text.wrap(long_word, 20), long_word) + self.assertEqual( + text.wrap("a %s word" % long_word, 10), "a\n%s\nword" % long_word + ) + self.assertEqual(text.wrap(lazystr(digits), 100), "1234 67 9") + + def test_normalize_newlines(self): + self.assertEqual( + text.normalize_newlines("abc\ndef\rghi\r\n"), "abc\ndef\nghi\n" + ) + self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n") + self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi") + self.assertEqual(text.normalize_newlines(""), "") + self.assertEqual( + text.normalize_newlines(lazystr("abc\ndef\rghi\r\n")), "abc\ndef\nghi\n" + ) + + def test_phone2numeric(self): + numeric = text.phone2numeric("0800 flowers") + self.assertEqual(numeric, "0800 3569377") + lazy_numeric = lazystr(text.phone2numeric("0800 flowers")) + self.assertEqual(lazy_numeric, "0800 3569377") + + def test_slugify(self): + items = ( + # given - expected - Unicode? 
+ ("Hello, World!", "hello-world", False), + ("spam & eggs", "spam-eggs", False), + (" multiple---dash and space ", "multiple-dash-and-space", False), + ("\t whitespace-in-value \n", "whitespace-in-value", False), + ("underscore_in-value", "underscore_in-value", False), + ("__strip__underscore-value___", "strip__underscore-value", False), + ("--strip-dash-value---", "strip-dash-value", False), + ("__strip-mixed-value---", "strip-mixed-value", False), + ("_ -strip-mixed-value _-", "strip-mixed-value", False), + ("spam & ıçüş", "spam-ıçüş", True), + ("foo ıç bar", "foo-ıç-bar", True), + (" foo ıç bar", "foo-ıç-bar", True), + ("你好", "你好", True), + ("İstanbul", "istanbul", True), + ) + for value, output, is_unicode in items: + with self.subTest(value=value): + self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output) + # Interning the result may be useful, e.g. when fed to Path. + with self.subTest("intern"): + self.assertEqual(sys.intern(text.slugify("a")), "a") + + def test_unescape_string_literal(self): + items = [ + ('"abc"', "abc"), + ("'abc'", "abc"), + ('"a "bc""', 'a "bc"'), + ("''ab' c'", "'ab' c"), + ] + for value, output in items: + with self.subTest(value=value): + self.assertEqual(text.unescape_string_literal(value), output) + self.assertEqual(text.unescape_string_literal(lazystr(value)), output) + + def test_unescape_string_literal_invalid_value(self): + items = ["", "abc", "'abc\""] + for item in items: + msg = f"Not a string literal: {item!r}" + with self.assertRaisesMessage(ValueError, msg): + text.unescape_string_literal(item) + + def test_get_valid_filename(self): + filename = "^&'@{}[],$=!-#()%+~_123.txt" + self.assertEqual(text.get_valid_filename(filename), "-_123.txt") + self.assertEqual(text.get_valid_filename(lazystr(filename)), "-_123.txt") + msg = "Could not derive file name from '???'" + with self.assertRaisesMessage(SuspiciousFileOperation, msg): + text.get_valid_filename("???") + # After sanitizing this would yield '..'. 
+ msg = "Could not derive file name from '$.$.$'" + with self.assertRaisesMessage(SuspiciousFileOperation, msg): + text.get_valid_filename("$.$.$") + + def test_compress_sequence(self): + data = [{"key": i} for i in range(10)] + seq = list(json.JSONEncoder().iterencode(data)) + seq = [s.encode() for s in seq] + actual_length = len(b"".join(seq)) + out = text.compress_sequence(seq) + compressed_length = len(b"".join(out)) + self.assertLess(compressed_length, actual_length) + + def test_format_lazy(self): + self.assertEqual("django/test", format_lazy("{}/{}", "django", lazystr("test"))) + self.assertEqual("django/test", format_lazy("{0}/{1}", *("django", "test"))) + self.assertEqual( + "django/test", format_lazy("{a}/{b}", **{"a": "django", "b": "test"}) + ) + self.assertEqual( + "django/test", format_lazy("{a[0]}/{a[1]}", a=("django", "test")) + ) + + t = {} + s = format_lazy("{0[a]}-{p[a]}", t, p=t) + t["a"] = lazystr("django") + self.assertEqual("django-django", s) + t["a"] = "update" + self.assertEqual("update-update", s) + + # The format string can be lazy. 
(string comes from contrib.admin) + s = format_lazy( + gettext_lazy("Added {name} “{object}”."), + name="article", + object="My first try", + ) + with override("fr"): + self.assertEqual("Ajout de article «\xa0My first try\xa0».", s) diff --git a/testbed/django__django/tests/utils_tests/test_timesince.py b/testbed/django__django/tests/utils_tests/test_timesince.py new file mode 100644 index 0000000000000000000000000000000000000000..0727e65af4c9f7b7eb265a732d1d55662e9b243d --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_timesince.py @@ -0,0 +1,285 @@ +import datetime +import zoneinfo + +from django.test import TestCase +from django.test.utils import override_settings, requires_tz_support +from django.utils import timezone, translation +from django.utils.timesince import timesince, timeuntil +from django.utils.translation import npgettext_lazy + + +class TimesinceTests(TestCase): + def setUp(self): + self.t = datetime.datetime(2007, 8, 14, 13, 46, 0) + self.onemicrosecond = datetime.timedelta(microseconds=1) + self.onesecond = datetime.timedelta(seconds=1) + self.oneminute = datetime.timedelta(minutes=1) + self.onehour = datetime.timedelta(hours=1) + self.oneday = datetime.timedelta(days=1) + self.oneweek = datetime.timedelta(days=7) + self.onemonth = datetime.timedelta(days=31) + self.oneyear = datetime.timedelta(days=366) + + def test_equal_datetimes(self): + """equal datetimes.""" + # NOTE: \xa0 avoids wrapping between value and unit + self.assertEqual(timesince(self.t, self.t), "0\xa0minutes") + + def test_ignore_microseconds_and_seconds(self): + """Microseconds and seconds are ignored.""" + self.assertEqual( + timesince(self.t, self.t + self.onemicrosecond), "0\xa0minutes" + ) + self.assertEqual(timesince(self.t, self.t + self.onesecond), "0\xa0minutes") + + def test_other_units(self): + """Test other units.""" + self.assertEqual(timesince(self.t, self.t + self.oneminute), "1\xa0minute") + self.assertEqual(timesince(self.t, self.t + 
self.onehour), "1\xa0hour") + self.assertEqual(timesince(self.t, self.t + self.oneday), "1\xa0day") + self.assertEqual(timesince(self.t, self.t + self.oneweek), "1\xa0week") + self.assertEqual(timesince(self.t, self.t + self.onemonth), "1\xa0month") + self.assertEqual(timesince(self.t, self.t + self.oneyear), "1\xa0year") + + def test_multiple_units(self): + """Test multiple units.""" + self.assertEqual( + timesince(self.t, self.t + 2 * self.oneday + 6 * self.onehour), + "2\xa0days, 6\xa0hours", + ) + self.assertEqual( + timesince(self.t, self.t + 2 * self.oneweek + 2 * self.oneday), + "2\xa0weeks, 2\xa0days", + ) + + def test_display_first_unit(self): + """ + If the two differing units aren't adjacent, only the first unit is + displayed. + """ + self.assertEqual( + timesince( + self.t, + self.t + 2 * self.oneweek + 3 * self.onehour + 4 * self.oneminute, + ), + "2\xa0weeks", + ) + self.assertEqual( + timesince(self.t, self.t + 4 * self.oneday + 5 * self.oneminute), + "4\xa0days", + ) + + def test_display_second_before_first(self): + """ + When the second date occurs before the first, we should always + get 0 minutes. 
+ """ + self.assertEqual( + timesince(self.t, self.t - self.onemicrosecond), "0\xa0minutes" + ) + self.assertEqual(timesince(self.t, self.t - self.onesecond), "0\xa0minutes") + self.assertEqual(timesince(self.t, self.t - self.oneminute), "0\xa0minutes") + self.assertEqual(timesince(self.t, self.t - self.onehour), "0\xa0minutes") + self.assertEqual(timesince(self.t, self.t - self.oneday), "0\xa0minutes") + self.assertEqual(timesince(self.t, self.t - self.oneweek), "0\xa0minutes") + self.assertEqual(timesince(self.t, self.t - self.onemonth), "0\xa0minutes") + self.assertEqual(timesince(self.t, self.t - self.oneyear), "0\xa0minutes") + self.assertEqual( + timesince(self.t, self.t - 2 * self.oneday - 6 * self.onehour), + "0\xa0minutes", + ) + self.assertEqual( + timesince(self.t, self.t - 2 * self.oneweek - 2 * self.oneday), + "0\xa0minutes", + ) + self.assertEqual( + timesince( + self.t, + self.t - 2 * self.oneweek - 3 * self.onehour - 4 * self.oneminute, + ), + "0\xa0minutes", + ) + self.assertEqual( + timesince(self.t, self.t - 4 * self.oneday - 5 * self.oneminute), + "0\xa0minutes", + ) + + def test_second_before_equal_first_humanize_time_strings(self): + time_strings = { + "minute": npgettext_lazy( + "naturaltime-future", + "%(num)d minute", + "%(num)d minutes", + "num", + ), + } + with translation.override("cs"): + for now in [self.t, self.t - self.onemicrosecond, self.t - self.oneday]: + with self.subTest(now): + self.assertEqual( + timesince(self.t, now, time_strings=time_strings), + "0\xa0minut", + ) + + @requires_tz_support + def test_different_timezones(self): + """When using two different timezones.""" + now = datetime.datetime.now() + now_tz = timezone.make_aware(now, timezone.get_default_timezone()) + now_tz_i = timezone.localtime(now_tz, timezone.get_fixed_timezone(195)) + + self.assertEqual(timesince(now), "0\xa0minutes") + self.assertEqual(timesince(now_tz), "0\xa0minutes") + self.assertEqual(timesince(now_tz_i), "0\xa0minutes") + 
self.assertEqual(timesince(now_tz, now_tz_i), "0\xa0minutes") + self.assertEqual(timeuntil(now), "0\xa0minutes") + self.assertEqual(timeuntil(now_tz), "0\xa0minutes") + self.assertEqual(timeuntil(now_tz_i), "0\xa0minutes") + self.assertEqual(timeuntil(now_tz, now_tz_i), "0\xa0minutes") + + def test_date_objects(self): + """Both timesince and timeuntil should work on date objects (#17937).""" + today = datetime.date.today() + self.assertEqual(timesince(today + self.oneday), "0\xa0minutes") + self.assertEqual(timeuntil(today - self.oneday), "0\xa0minutes") + + def test_both_date_objects(self): + """Timesince should work with both date objects (#9672)""" + today = datetime.date.today() + self.assertEqual(timeuntil(today + self.oneday, today), "1\xa0day") + self.assertEqual(timeuntil(today - self.oneday, today), "0\xa0minutes") + self.assertEqual(timeuntil(today + self.oneweek, today), "1\xa0week") + + def test_leap_year(self): + start_date = datetime.date(2016, 12, 25) + self.assertEqual(timeuntil(start_date + self.oneweek, start_date), "1\xa0week") + self.assertEqual(timesince(start_date, start_date + self.oneweek), "1\xa0week") + + def test_leap_year_new_years_eve(self): + t = datetime.date(2016, 12, 31) + now = datetime.datetime(2016, 12, 31, 18, 0, 0) + self.assertEqual(timesince(t + self.oneday, now), "0\xa0minutes") + self.assertEqual(timeuntil(t - self.oneday, now), "0\xa0minutes") + + def test_naive_datetime_with_tzinfo_attribute(self): + class naive(datetime.tzinfo): + def utcoffset(self, dt): + return None + + future = datetime.datetime(2080, 1, 1, tzinfo=naive()) + self.assertEqual(timesince(future), "0\xa0minutes") + past = datetime.datetime(1980, 1, 1, tzinfo=naive()) + self.assertEqual(timeuntil(past), "0\xa0minutes") + + def test_thousand_years_ago(self): + t = self.t.replace(year=self.t.year - 1000) + self.assertEqual(timesince(t, self.t), "1000\xa0years") + self.assertEqual(timeuntil(self.t, t), "1000\xa0years") + + def test_depth(self): + t = ( + 
self.t + + self.oneyear + + self.onemonth + + self.oneweek + + self.oneday + + self.onehour + ) + tests = [ + (t, 1, "1\xa0year"), + (t, 2, "1\xa0year, 1\xa0month"), + (t, 3, "1\xa0year, 1\xa0month, 1\xa0week"), + (t, 4, "1\xa0year, 1\xa0month, 1\xa0week, 1\xa0day"), + (t, 5, "1\xa0year, 1\xa0month, 1\xa0week, 1\xa0day, 1\xa0hour"), + (t, 6, "1\xa0year, 1\xa0month, 1\xa0week, 1\xa0day, 1\xa0hour"), + (self.t + self.onehour, 5, "1\xa0hour"), + (self.t + (4 * self.oneminute), 3, "4\xa0minutes"), + (self.t + self.onehour + self.oneminute, 1, "1\xa0hour"), + (self.t + self.oneday + self.onehour, 1, "1\xa0day"), + (self.t + self.oneweek + self.oneday, 1, "1\xa0week"), + (self.t + self.onemonth + self.oneweek, 1, "1\xa0month"), + (self.t + self.oneyear + self.onemonth, 1, "1\xa0year"), + (self.t + self.oneyear + self.oneweek + self.oneday, 3, "1\xa0year"), + ] + for value, depth, expected in tests: + with self.subTest(): + self.assertEqual(timesince(self.t, value, depth=depth), expected) + self.assertEqual(timeuntil(value, self.t, depth=depth), expected) + + def test_months_edge(self): + t = datetime.datetime(2022, 1, 1) + tests = [ + (datetime.datetime(2022, 1, 31), "4\xa0weeks, 2\xa0days"), + (datetime.datetime(2022, 2, 1), "1\xa0month"), + (datetime.datetime(2022, 2, 28), "1\xa0month, 3\xa0weeks"), + (datetime.datetime(2022, 3, 1), "2\xa0months"), + (datetime.datetime(2022, 3, 31), "2\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 4, 1), "3\xa0months"), + (datetime.datetime(2022, 4, 30), "3\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 5, 1), "4\xa0months"), + (datetime.datetime(2022, 5, 31), "4\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 6, 1), "5\xa0months"), + (datetime.datetime(2022, 6, 30), "5\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 7, 1), "6\xa0months"), + (datetime.datetime(2022, 7, 31), "6\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 8, 1), "7\xa0months"), + (datetime.datetime(2022, 8, 31), "7\xa0months, 4\xa0weeks"), + 
(datetime.datetime(2022, 9, 1), "8\xa0months"), + (datetime.datetime(2022, 9, 30), "8\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 10, 1), "9\xa0months"), + (datetime.datetime(2022, 10, 31), "9\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 11, 1), "10\xa0months"), + (datetime.datetime(2022, 11, 30), "10\xa0months, 4\xa0weeks"), + (datetime.datetime(2022, 12, 1), "11\xa0months"), + (datetime.datetime(2022, 12, 31), "11\xa0months, 4\xa0weeks"), + ] + for value, expected in tests: + with self.subTest(): + self.assertEqual(timesince(t, value), expected) + + def test_depth_invalid(self): + msg = "depth must be greater than 0." + with self.assertRaisesMessage(ValueError, msg): + timesince(self.t, self.t, depth=0) + + @requires_tz_support + def test_less_than_a_day_with_zoneinfo(self): + now_with_zoneinfo = timezone.now().astimezone( + zoneinfo.ZoneInfo(key="Asia/Kathmandu") # UTC+05:45 + ) + tests = [ + (now_with_zoneinfo, "0\xa0minutes"), + (now_with_zoneinfo - self.onemicrosecond, "0\xa0minutes"), + (now_with_zoneinfo - self.onesecond, "0\xa0minutes"), + (now_with_zoneinfo - self.oneminute, "1\xa0minute"), + (now_with_zoneinfo - self.onehour, "1\xa0hour"), + ] + for value, expected in tests: + with self.subTest(value): + self.assertEqual(timesince(value), expected) + + @requires_tz_support + def test_less_than_a_day_cross_day_with_zoneinfo(self): + now_with_zoneinfo = timezone.make_aware( + datetime.datetime(2023, 4, 14, 1, 30, 30), + zoneinfo.ZoneInfo(key="Asia/Kathmandu"), # UTC+05:45 + ) + now_utc = now_with_zoneinfo.astimezone(datetime.timezone.utc) + tests = [ + (now_with_zoneinfo, "0\xa0minutes"), + (now_with_zoneinfo - self.onemicrosecond, "0\xa0minutes"), + (now_with_zoneinfo - self.onesecond, "0\xa0minutes"), + (now_with_zoneinfo - self.oneminute, "1\xa0minute"), + (now_with_zoneinfo - self.onehour, "1\xa0hour"), + ] + for value, expected in tests: + with self.subTest(value): + self.assertEqual(timesince(value, now_utc), expected) + + 
+@requires_tz_support +@override_settings(USE_TZ=True) +class TZAwareTimesinceTests(TimesinceTests): + def setUp(self): + super().setUp() + self.t = timezone.make_aware(self.t, timezone.get_default_timezone()) diff --git a/testbed/django__django/tests/utils_tests/test_timezone.py b/testbed/django__django/tests/utils_tests/test_timezone.py new file mode 100644 index 0000000000000000000000000000000000000000..43bb2bc7a342c3d145c91ed02b54838040c7ce42 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_timezone.py @@ -0,0 +1,255 @@ +import datetime +import zoneinfo +from unittest import mock + +from django.test import SimpleTestCase, override_settings +from django.utils import timezone + +PARIS_ZI = zoneinfo.ZoneInfo("Europe/Paris") +EAT = timezone.get_fixed_timezone(180) # Africa/Nairobi +ICT = timezone.get_fixed_timezone(420) # Asia/Bangkok +UTC = datetime.timezone.utc + + +class TimezoneTests(SimpleTestCase): + def test_default_timezone_is_zoneinfo(self): + self.assertIsInstance(timezone.get_default_timezone(), zoneinfo.ZoneInfo) + + def test_now(self): + with override_settings(USE_TZ=True): + self.assertTrue(timezone.is_aware(timezone.now())) + with override_settings(USE_TZ=False): + self.assertTrue(timezone.is_naive(timezone.now())) + + def test_localdate(self): + naive = datetime.datetime(2015, 1, 1, 0, 0, 1) + with self.assertRaisesMessage( + ValueError, "localtime() cannot be applied to a naive datetime" + ): + timezone.localdate(naive) + with self.assertRaisesMessage( + ValueError, "localtime() cannot be applied to a naive datetime" + ): + timezone.localdate(naive, timezone=EAT) + + aware = datetime.datetime(2015, 1, 1, 0, 0, 1, tzinfo=ICT) + self.assertEqual( + timezone.localdate(aware, timezone=EAT), datetime.date(2014, 12, 31) + ) + with timezone.override(EAT): + self.assertEqual(timezone.localdate(aware), datetime.date(2014, 12, 31)) + + with mock.patch("django.utils.timezone.now", return_value=aware): + self.assertEqual( + 
timezone.localdate(timezone=EAT), datetime.date(2014, 12, 31) + ) + with timezone.override(EAT): + self.assertEqual(timezone.localdate(), datetime.date(2014, 12, 31)) + + def test_override(self): + default = timezone.get_default_timezone() + try: + timezone.activate(ICT) + + with timezone.override(EAT): + self.assertIs(EAT, timezone.get_current_timezone()) + self.assertIs(ICT, timezone.get_current_timezone()) + + with timezone.override(None): + self.assertIs(default, timezone.get_current_timezone()) + self.assertIs(ICT, timezone.get_current_timezone()) + + timezone.deactivate() + + with timezone.override(EAT): + self.assertIs(EAT, timezone.get_current_timezone()) + self.assertIs(default, timezone.get_current_timezone()) + + with timezone.override(None): + self.assertIs(default, timezone.get_current_timezone()) + self.assertIs(default, timezone.get_current_timezone()) + finally: + timezone.deactivate() + + def test_override_decorator(self): + default = timezone.get_default_timezone() + + @timezone.override(EAT) + def func_tz_eat(): + self.assertIs(EAT, timezone.get_current_timezone()) + + @timezone.override(None) + def func_tz_none(): + self.assertIs(default, timezone.get_current_timezone()) + + try: + timezone.activate(ICT) + + func_tz_eat() + self.assertIs(ICT, timezone.get_current_timezone()) + + func_tz_none() + self.assertIs(ICT, timezone.get_current_timezone()) + + timezone.deactivate() + + func_tz_eat() + self.assertIs(default, timezone.get_current_timezone()) + + func_tz_none() + self.assertIs(default, timezone.get_current_timezone()) + finally: + timezone.deactivate() + + def test_override_string_tz(self): + with timezone.override("Asia/Bangkok"): + self.assertEqual(timezone.get_current_timezone_name(), "Asia/Bangkok") + + def test_override_fixed_offset(self): + with timezone.override(datetime.timezone(datetime.timedelta(), "tzname")): + self.assertEqual(timezone.get_current_timezone_name(), "tzname") + + def test_activate_invalid_timezone(self): + with 
self.assertRaisesMessage(ValueError, "Invalid timezone: None"): + timezone.activate(None) + + def test_is_aware(self): + self.assertTrue( + timezone.is_aware(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)) + ) + self.assertFalse(timezone.is_aware(datetime.datetime(2011, 9, 1, 13, 20, 30))) + + def test_is_naive(self): + self.assertFalse( + timezone.is_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)) + ) + self.assertTrue(timezone.is_naive(datetime.datetime(2011, 9, 1, 13, 20, 30))) + + def test_make_aware(self): + self.assertEqual( + timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT), + datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), + ) + with self.assertRaises(ValueError): + timezone.make_aware( + datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT + ) + + def test_make_naive(self): + self.assertEqual( + timezone.make_naive( + datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT + ), + datetime.datetime(2011, 9, 1, 13, 20, 30), + ) + self.assertEqual( + timezone.make_naive( + datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT), EAT + ), + datetime.datetime(2011, 9, 1, 13, 20, 30), + ) + + with self.assertRaisesMessage( + ValueError, "make_naive() cannot be applied to a naive datetime" + ): + timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT) + + def test_make_naive_no_tz(self): + self.assertEqual( + timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)), + datetime.datetime(2011, 9, 1, 5, 20, 30), + ) + + def test_make_aware_no_tz(self): + self.assertEqual( + timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30)), + datetime.datetime( + 2011, 9, 1, 13, 20, 30, tzinfo=timezone.get_fixed_timezone(-300) + ), + ) + + def test_make_aware2(self): + CEST = datetime.timezone(datetime.timedelta(hours=2), "CEST") + self.assertEqual( + timezone.make_aware(datetime.datetime(2011, 9, 1, 12, 20, 30), PARIS_ZI), + datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=CEST), + ) + 
with self.assertRaises(ValueError): + timezone.make_aware( + datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=PARIS_ZI), PARIS_ZI + ) + + def test_make_naive_zoneinfo(self): + self.assertEqual( + timezone.make_naive( + datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=PARIS_ZI), PARIS_ZI + ), + datetime.datetime(2011, 9, 1, 12, 20, 30), + ) + + self.assertEqual( + timezone.make_naive( + datetime.datetime(2011, 9, 1, 12, 20, 30, fold=1, tzinfo=PARIS_ZI), + PARIS_ZI, + ), + datetime.datetime(2011, 9, 1, 12, 20, 30, fold=1), + ) + + def test_make_aware_zoneinfo_ambiguous(self): + # 2:30 happens twice, once before DST ends and once after + ambiguous = datetime.datetime(2015, 10, 25, 2, 30) + + std = timezone.make_aware(ambiguous.replace(fold=1), timezone=PARIS_ZI) + dst = timezone.make_aware(ambiguous, timezone=PARIS_ZI) + + self.assertEqual( + std.astimezone(UTC) - dst.astimezone(UTC), datetime.timedelta(hours=1) + ) + self.assertEqual(std.utcoffset(), datetime.timedelta(hours=1)) + self.assertEqual(dst.utcoffset(), datetime.timedelta(hours=2)) + + def test_make_aware_zoneinfo_non_existent(self): + # 2:30 never happened due to DST + non_existent = datetime.datetime(2015, 3, 29, 2, 30) + + std = timezone.make_aware(non_existent, PARIS_ZI) + dst = timezone.make_aware(non_existent.replace(fold=1), PARIS_ZI) + + self.assertEqual( + std.astimezone(UTC) - dst.astimezone(UTC), datetime.timedelta(hours=1) + ) + self.assertEqual(std.utcoffset(), datetime.timedelta(hours=1)) + self.assertEqual(dst.utcoffset(), datetime.timedelta(hours=2)) + + def test_get_timezone_name(self): + """ + The _get_timezone_name() helper must return the offset for fixed offset + timezones, for usage with Trunc DB functions. + + The datetime.timezone examples show the current behavior. + """ + tests = [ + # datetime.timezone, fixed offset with and without `name`. 
+ (datetime.timezone(datetime.timedelta(hours=10)), "UTC+10:00"), + ( + datetime.timezone(datetime.timedelta(hours=10), name="Etc/GMT-10"), + "Etc/GMT-10", + ), + # zoneinfo, named and fixed offset. + (zoneinfo.ZoneInfo("Europe/Madrid"), "Europe/Madrid"), + (zoneinfo.ZoneInfo("Etc/GMT-10"), "+10"), + ] + for tz, expected in tests: + with self.subTest(tz=tz, expected=expected): + self.assertEqual(timezone._get_timezone_name(tz), expected) + + def test_get_default_timezone(self): + self.assertEqual(timezone.get_default_timezone_name(), "America/Chicago") + + def test_fixedoffset_timedelta(self): + delta = datetime.timedelta(hours=1) + self.assertEqual(timezone.get_fixed_timezone(delta).utcoffset(None), delta) + + def test_fixedoffset_negative_timedelta(self): + delta = datetime.timedelta(hours=-2) + self.assertEqual(timezone.get_fixed_timezone(delta).utcoffset(None), delta) diff --git a/testbed/django__django/tests/utils_tests/test_tree.py b/testbed/django__django/tests/utils_tests/test_tree.py new file mode 100644 index 0000000000000000000000000000000000000000..04223964ba698089bd1e3072ec39e9f3f0c37eb7 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/test_tree.py @@ -0,0 +1,126 @@ +import copy +import unittest + +from django.db.models.sql import AND, OR +from django.utils.tree import Node + + +class NodeTests(unittest.TestCase): + def setUp(self): + self.node1_children = [("a", 1), ("b", 2)] + self.node1 = Node(self.node1_children) + self.node2 = Node() + + def test_str(self): + self.assertEqual(str(self.node1), "(DEFAULT: ('a', 1), ('b', 2))") + self.assertEqual(str(self.node2), "(DEFAULT: )") + + def test_repr(self): + self.assertEqual(repr(self.node1), "") + self.assertEqual(repr(self.node2), "") + + def test_hash(self): + node3 = Node(self.node1_children, negated=True) + node4 = Node(self.node1_children, connector="OTHER") + node5 = Node(self.node1_children) + node6 = Node([["a", 1], ["b", 2]]) + node7 = Node([("a", [1, 2])]) + node8 = Node([("a", (1, 
2))]) + self.assertNotEqual(hash(self.node1), hash(self.node2)) + self.assertNotEqual(hash(self.node1), hash(node3)) + self.assertNotEqual(hash(self.node1), hash(node4)) + self.assertEqual(hash(self.node1), hash(node5)) + self.assertEqual(hash(self.node1), hash(node6)) + self.assertEqual(hash(self.node2), hash(Node())) + self.assertEqual(hash(node7), hash(node8)) + + def test_len(self): + self.assertEqual(len(self.node1), 2) + self.assertEqual(len(self.node2), 0) + + def test_bool(self): + self.assertTrue(self.node1) + self.assertFalse(self.node2) + + def test_contains(self): + self.assertIn(("a", 1), self.node1) + self.assertNotIn(("a", 1), self.node2) + + def test_add(self): + # start with the same children of node1 then add an item + node3 = Node(self.node1_children) + node3_added_child = ("c", 3) + # add() returns the added data + self.assertEqual(node3.add(node3_added_child, Node.default), node3_added_child) + # we added exactly one item, len() should reflect that + self.assertEqual(len(self.node1) + 1, len(node3)) + self.assertEqual(str(node3), "(DEFAULT: ('a', 1), ('b', 2), ('c', 3))") + + def test_add_eq_child_mixed_connector(self): + node = Node(["a", "b"], OR) + self.assertEqual(node.add("a", AND), "a") + self.assertEqual(node, Node([Node(["a", "b"], OR), "a"], AND)) + + def test_negate(self): + # negated is False by default + self.assertFalse(self.node1.negated) + self.node1.negate() + self.assertTrue(self.node1.negated) + self.node1.negate() + self.assertFalse(self.node1.negated) + + def test_create(self): + SubNode = type("SubNode", (Node,), {}) + + a = SubNode([SubNode(["a", "b"], OR), "c"], AND) + b = SubNode.create(a.children, a.connector, a.negated) + self.assertEqual(a, b) + # Children lists are the same object, but equal. + self.assertIsNot(a.children, b.children) + self.assertEqual(a.children, b.children) + # Child Node objects are the same objects. 
+ for a_child, b_child in zip(a.children, b.children): + if isinstance(a_child, Node): + self.assertIs(a_child, b_child) + self.assertEqual(a_child, b_child) + + def test_copy(self): + a = Node([Node(["a", "b"], OR), "c"], AND) + b = copy.copy(a) + self.assertEqual(a, b) + # Children lists are the same object. + self.assertIs(a.children, b.children) + # Child Node objects are the same objects. + for a_child, b_child in zip(a.children, b.children): + if isinstance(a_child, Node): + self.assertIs(a_child, b_child) + self.assertEqual(a_child, b_child) + + def test_deepcopy(self): + a = Node([Node(["a", "b"], OR), "c"], AND) + b = copy.deepcopy(a) + self.assertEqual(a, b) + # Children lists are not be the same object, but equal. + self.assertIsNot(a.children, b.children) + self.assertEqual(a.children, b.children) + # Child Node objects are not be the same objects. + for a_child, b_child in zip(a.children, b.children): + if isinstance(a_child, Node): + self.assertIsNot(a_child, b_child) + self.assertEqual(a_child, b_child) + + def test_eq_children(self): + node = Node(self.node1_children) + self.assertEqual(node, self.node1) + self.assertNotEqual(node, self.node2) + + def test_eq_connector(self): + new_node = Node(connector="NEW") + default_node = Node(connector="DEFAULT") + self.assertEqual(default_node, self.node2) + self.assertNotEqual(default_node, new_node) + + def test_eq_negated(self): + node = Node(negated=False) + negated = Node(negated=True) + self.assertNotEqual(negated, node) diff --git a/testbed/django__django/tests/utils_tests/utils.py b/testbed/django__django/tests/utils_tests/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b17bd1b0973179974c005d2a4333f4eb1eb0a234 --- /dev/null +++ b/testbed/django__django/tests/utils_tests/utils.py @@ -0,0 +1,14 @@ +import platform + + +def on_macos_with_hfs(): + """ + MacOS 10.13 (High Sierra) and lower can use HFS+ as a filesystem. 
+ HFS+ has a time resolution of only one second which can be too low for + some of the tests. + """ + macos_version = platform.mac_ver()[0] + if macos_version != "": + parsed_macos_version = tuple(int(x) for x in macos_version.split(".")) + return parsed_macos_version < (10, 14) + return False diff --git a/testbed/django__django/tests/validation/__init__.py b/testbed/django__django/tests/validation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5d87d8c7311fc94b75f6ff08a8ba0f66331b9cab --- /dev/null +++ b/testbed/django__django/tests/validation/__init__.py @@ -0,0 +1,14 @@ +from django.core.exceptions import ValidationError + + +class ValidationAssertions: + def assertFailsValidation(self, clean, failed_fields, **kwargs): + with self.assertRaises(ValidationError) as cm: + clean(**kwargs) + self.assertEqual(sorted(failed_fields), sorted(cm.exception.message_dict)) + + def assertFieldFailsValidationWithMessage(self, clean, field_name, message): + with self.assertRaises(ValidationError) as cm: + clean() + self.assertIn(field_name, cm.exception.message_dict) + self.assertEqual(message, cm.exception.message_dict[field_name]) diff --git a/testbed/django__django/tests/validation/models.py b/testbed/django__django/tests/validation/models.py new file mode 100644 index 0000000000000000000000000000000000000000..8919a69310030007930415b96af6b6a0aa141d84 --- /dev/null +++ b/testbed/django__django/tests/validation/models.py @@ -0,0 +1,219 @@ +from datetime import datetime + +from django.core.exceptions import ValidationError +from django.db import models +from django.db.models.functions import Lower + + +def validate_answer_to_universe(value): + if value != 42: + raise ValidationError( + "This is not the answer to life, universe and everything!", code="not42" + ) + + +class ModelToValidate(models.Model): + name = models.CharField(max_length=100) + created = models.DateTimeField(default=datetime.now) + number = 
models.IntegerField(db_column="number_val") + parent = models.ForeignKey( + "self", + models.SET_NULL, + blank=True, + null=True, + limit_choices_to={"number": 10}, + ) + email = models.EmailField(blank=True) + ufm = models.ForeignKey( + "UniqueFieldsModel", + models.SET_NULL, + to_field="unique_charfield", + blank=True, + null=True, + ) + url = models.URLField(blank=True) + f_with_custom_validator = models.IntegerField( + blank=True, null=True, validators=[validate_answer_to_universe] + ) + f_with_iterable_of_validators = models.IntegerField( + blank=True, null=True, validators=(validate_answer_to_universe,) + ) + slug = models.SlugField(blank=True) + + def clean(self): + super().clean() + if self.number == 11: + raise ValidationError("Invalid number supplied!") + + +class UniqueFieldsModel(models.Model): + unique_charfield = models.CharField(max_length=100, unique=True) + unique_integerfield = models.IntegerField(unique=True) + non_unique_field = models.IntegerField() + + +class CustomPKModel(models.Model): + my_pk_field = models.CharField(max_length=100, primary_key=True) + + +class UniqueTogetherModel(models.Model): + cfield = models.CharField(max_length=100) + ifield = models.IntegerField() + efield = models.EmailField() + + class Meta: + unique_together = ( + ( + "ifield", + "cfield", + ), + ["ifield", "efield"], + ) + + +class UniqueForDateModel(models.Model): + start_date = models.DateField() + end_date = models.DateTimeField() + count = models.IntegerField( + unique_for_date="start_date", unique_for_year="end_date" + ) + order = models.IntegerField(unique_for_month="end_date") + name = models.CharField(max_length=100) + + +class CustomMessagesModel(models.Model): + other = models.IntegerField(blank=True, null=True) + number = models.IntegerField( + db_column="number_val", + error_messages={"null": "NULL", "not42": "AAARGH", "not_equal": "%s != me"}, + validators=[validate_answer_to_universe], + ) + + +class AuthorManager(models.Manager): + def 
get_queryset(self): + qs = super().get_queryset() + return qs.filter(archived=False) + + +class Author(models.Model): + name = models.CharField(max_length=100) + archived = models.BooleanField(default=False) + + objects = AuthorManager() + + +class Article(models.Model): + title = models.CharField(max_length=100) + author = models.ForeignKey(Author, models.CASCADE) + pub_date = models.DateTimeField(blank=True) + + def clean(self): + if self.pub_date is None: + self.pub_date = datetime.now() + + +class Post(models.Model): + title = models.CharField(max_length=50, unique_for_date="posted", blank=True) + slug = models.CharField(max_length=50, unique_for_year="posted", blank=True) + subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True) + posted = models.DateField() + + +class FlexibleDatePost(models.Model): + title = models.CharField(max_length=50, unique_for_date="posted", blank=True) + slug = models.CharField(max_length=50, unique_for_year="posted", blank=True) + subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True) + posted = models.DateField(blank=True, null=True) + + +class UniqueErrorsModel(models.Model): + name = models.CharField( + max_length=100, + unique=True, + error_messages={"unique": "Custom unique name message."}, + ) + no = models.IntegerField( + unique=True, error_messages={"unique": "Custom unique number message."} + ) + + +class GenericIPAddressTestModel(models.Model): + generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True) + v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4") + v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6") + ip_verbose_name = models.GenericIPAddressField( + "IP Address Verbose", blank=True, null=True + ) + + +class GenericIPAddrUnpackUniqueTest(models.Model): + generic_v4unpack_ip = models.GenericIPAddressField( + null=True, blank=True, unique=True, unpack_ipv4=True + ) + + +class 
UniqueFuncConstraintModel(models.Model): + field = models.CharField(max_length=255) + + class Meta: + required_db_features = {"supports_expression_indexes"} + constraints = [ + models.UniqueConstraint(Lower("field"), name="func_lower_field_uq"), + ] + + +class Product(models.Model): + price = models.IntegerField(null=True) + discounted_price = models.IntegerField(null=True) + + class Meta: + required_db_features = { + "supports_table_check_constraints", + } + constraints = [ + models.CheckConstraint( + check=models.Q(price__gt=models.F("discounted_price")), + name="price_gt_discounted_price_validation", + ), + ] + + +class ChildProduct(Product): + class Meta: + required_db_features = { + "supports_table_check_constraints", + } + + +class UniqueConstraintProduct(models.Model): + name = models.CharField(max_length=255) + color = models.CharField(max_length=32) + rank = models.IntegerField() + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["name", "color"], name="name_color_uniq_validation" + ), + models.UniqueConstraint(fields=["rank"], name="rank_uniq_validation"), + ] + + +class ChildUniqueConstraintProduct(UniqueConstraintProduct): + pass + + +class UniqueConstraintConditionProduct(models.Model): + name = models.CharField(max_length=255) + color = models.CharField(max_length=31, null=True, blank=True) + + class Meta: + required_db_features = {"supports_partial_indexes"} + constraints = [ + models.UniqueConstraint( + fields=["name"], + name="name_without_color_uniq_validation", + condition=models.Q(color__isnull=True), + ), + ] diff --git a/testbed/django__django/tests/validation/test_constraints.py b/testbed/django__django/tests/validation/test_constraints.py new file mode 100644 index 0000000000000000000000000000000000000000..0b1ee6518e28ce9fbcf640dc302145beb007c050 --- /dev/null +++ b/testbed/django__django/tests/validation/test_constraints.py @@ -0,0 +1,95 @@ +from django.core.exceptions import ValidationError +from django.test import 
TestCase, skipUnlessDBFeature + +from .models import ( + ChildProduct, + ChildUniqueConstraintProduct, + Product, + UniqueConstraintConditionProduct, + UniqueConstraintProduct, +) + + +class PerformConstraintChecksTest(TestCase): + @skipUnlessDBFeature("supports_table_check_constraints") + def test_full_clean_with_check_constraints(self): + product = Product(price=10, discounted_price=15) + with self.assertRaises(ValidationError) as cm: + product.full_clean() + self.assertEqual( + cm.exception.message_dict, + { + "__all__": [ + "Constraint “price_gt_discounted_price_validation” is violated." + ] + }, + ) + + @skipUnlessDBFeature("supports_table_check_constraints") + def test_full_clean_with_check_constraints_on_child_model(self): + product = ChildProduct(price=10, discounted_price=15) + with self.assertRaises(ValidationError) as cm: + product.full_clean() + self.assertEqual( + cm.exception.message_dict, + { + "__all__": [ + "Constraint “price_gt_discounted_price_validation” is violated." + ] + }, + ) + + @skipUnlessDBFeature("supports_table_check_constraints") + def test_full_clean_with_check_constraints_disabled(self): + product = Product(price=10, discounted_price=15) + product.full_clean(validate_constraints=False) + + def test_full_clean_with_unique_constraints(self): + UniqueConstraintProduct.objects.create(name="product", color="yellow", rank=1) + tests = [ + UniqueConstraintProduct(name="product", color="yellow", rank=1), + # Child model. + ChildUniqueConstraintProduct(name="product", color="yellow", rank=1), + ] + for product in tests: + with self.subTest(model=product.__class__.__name__): + with self.assertRaises(ValidationError) as cm: + product.full_clean() + self.assertEqual( + cm.exception.message_dict, + { + "__all__": [ + "Unique constraint product with this Name and Color " + "already exists." + ], + "rank": [ + "Unique constraint product with this Rank already exists." 
+ ], + }, + ) + + def test_full_clean_with_unique_constraints_disabled(self): + UniqueConstraintProduct.objects.create(name="product", color="yellow", rank=1) + product = UniqueConstraintProduct(name="product", color="yellow", rank=1) + product.full_clean(validate_constraints=False) + + @skipUnlessDBFeature("supports_partial_indexes") + def test_full_clean_with_partial_unique_constraints(self): + UniqueConstraintConditionProduct.objects.create(name="product") + product = UniqueConstraintConditionProduct(name="product") + with self.assertRaises(ValidationError) as cm: + product.full_clean() + self.assertEqual( + cm.exception.message_dict, + { + "__all__": [ + "Constraint “name_without_color_uniq_validation” is violated." + ] + }, + ) + + @skipUnlessDBFeature("supports_partial_indexes") + def test_full_clean_with_partial_unique_constraints_disabled(self): + UniqueConstraintConditionProduct.objects.create(name="product") + product = UniqueConstraintConditionProduct(name="product") + product.full_clean(validate_constraints=False) diff --git a/testbed/django__django/tests/validation/test_custom_messages.py b/testbed/django__django/tests/validation/test_custom_messages.py new file mode 100644 index 0000000000000000000000000000000000000000..3b130caa1e719792a85b0f05a047af3795de6dd4 --- /dev/null +++ b/testbed/django__django/tests/validation/test_custom_messages.py @@ -0,0 +1,14 @@ +from django.test import SimpleTestCase + +from . 
import ValidationAssertions +from .models import CustomMessagesModel + + +class CustomMessagesTests(ValidationAssertions, SimpleTestCase): + def test_custom_simple_validator_message(self): + cmm = CustomMessagesModel(number=12) + self.assertFieldFailsValidationWithMessage(cmm.full_clean, "number", ["AAARGH"]) + + def test_custom_null_message(self): + cmm = CustomMessagesModel() + self.assertFieldFailsValidationWithMessage(cmm.full_clean, "number", ["NULL"]) diff --git a/testbed/django__django/tests/validation/test_error_messages.py b/testbed/django__django/tests/validation/test_error_messages.py new file mode 100644 index 0000000000000000000000000000000000000000..ffc660aad6ee06e762ec588f847741632ba1f52d --- /dev/null +++ b/testbed/django__django/tests/validation/test_error_messages.py @@ -0,0 +1,133 @@ +from unittest import TestCase + +from django.core.exceptions import ValidationError +from django.db import models + + +class ValidationMessagesTest(TestCase): + def _test_validation_messages(self, field, value, expected): + with self.assertRaises(ValidationError) as cm: + field.clean(value, None) + self.assertEqual(cm.exception.messages, expected) + + def test_autofield_field_raises_error_message(self): + f = models.AutoField(primary_key=True) + self._test_validation_messages(f, "fõo", ["“fõo” value must be an integer."]) + + def test_integer_field_raises_error_message(self): + f = models.IntegerField() + self._test_validation_messages(f, "fõo", ["“fõo” value must be an integer."]) + + def test_boolean_field_raises_error_message(self): + f = models.BooleanField() + self._test_validation_messages( + f, "fõo", ["“fõo” value must be either True or False."] + ) + + def test_nullable_boolean_field_raises_error_message(self): + f = models.BooleanField(null=True) + self._test_validation_messages( + f, "fõo", ["“fõo” value must be either True, False, or None."] + ) + + def test_float_field_raises_error_message(self): + f = models.FloatField() + 
self._test_validation_messages(f, "fõo", ["“fõo” value must be a float."]) + + def test_decimal_field_raises_error_message(self): + f = models.DecimalField() + self._test_validation_messages( + f, "fõo", ["“fõo” value must be a decimal number."] + ) + + def test_null_boolean_field_raises_error_message(self): + f = models.BooleanField(null=True) + self._test_validation_messages( + f, "fõo", ["“fõo” value must be either True, False, or None."] + ) + + def test_date_field_raises_error_message(self): + f = models.DateField() + self._test_validation_messages( + f, + "fõo", + [ + "“fõo” value has an invalid date format. It must be in YYYY-MM-DD " + "format." + ], + ) + self._test_validation_messages( + f, + "aaaa-10-10", + [ + "“aaaa-10-10” value has an invalid date format. It must be in " + "YYYY-MM-DD format." + ], + ) + self._test_validation_messages( + f, + "2011-13-10", + [ + "“2011-13-10” value has the correct format (YYYY-MM-DD) but it is an " + "invalid date." + ], + ) + self._test_validation_messages( + f, + "2011-10-32", + [ + "“2011-10-32” value has the correct format (YYYY-MM-DD) but it is an " + "invalid date." + ], + ) + + def test_datetime_field_raises_error_message(self): + f = models.DateTimeField() + # Wrong format + self._test_validation_messages( + f, + "fõo", + [ + "“fõo” value has an invalid format. It must be in " + "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format." + ], + ) + # Correct format but invalid date + self._test_validation_messages( + f, + "2011-10-32", + [ + "“2011-10-32” value has the correct format (YYYY-MM-DD) but it is an " + "invalid date." + ], + ) + # Correct format but invalid date/time + self._test_validation_messages( + f, + "2011-10-32 10:10", + [ + "“2011-10-32 10:10” value has the correct format " + "(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) but it is an invalid date/time." 
+ ], + ) + + def test_time_field_raises_error_message(self): + f = models.TimeField() + # Wrong format + self._test_validation_messages( + f, + "fõo", + [ + "“fõo” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] " + "format." + ], + ) + # Correct format but invalid time + self._test_validation_messages( + f, + "25:50", + [ + "“25:50” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is " + "an invalid time." + ], + ) diff --git a/testbed/django__django/tests/validation/test_picklable.py b/testbed/django__django/tests/validation/test_picklable.py new file mode 100644 index 0000000000000000000000000000000000000000..1fce55d9a37d0963af85bdd90e8770157077c125 --- /dev/null +++ b/testbed/django__django/tests/validation/test_picklable.py @@ -0,0 +1,52 @@ +import pickle +from unittest import TestCase + +from django.core.exceptions import ValidationError + + +class PickableValidationErrorTestCase(TestCase): + def test_validationerror_is_picklable(self): + original = ValidationError("a", code="something") + unpickled = pickle.loads(pickle.dumps(original)) + self.assertIs(unpickled, unpickled.error_list[0]) + self.assertEqual(original.message, unpickled.message) + self.assertEqual(original.code, unpickled.code) + + original = ValidationError("a", code="something") + unpickled = pickle.loads(pickle.dumps(ValidationError(original))) + self.assertIs(unpickled, unpickled.error_list[0]) + self.assertEqual(original.message, unpickled.message) + self.assertEqual(original.code, unpickled.code) + + original = ValidationError(["a", "b"]) + unpickled = pickle.loads(pickle.dumps(original)) + self.assertEqual( + original.error_list[0].message, unpickled.error_list[0].message + ) + self.assertEqual( + original.error_list[1].message, unpickled.error_list[1].message + ) + + original = ValidationError(["a", "b"]) + unpickled = pickle.loads(pickle.dumps(ValidationError(original))) + self.assertEqual( + original.error_list[0].message, unpickled.error_list[0].message + ) + 
self.assertEqual( + original.error_list[1].message, unpickled.error_list[1].message + ) + + original = ValidationError([ValidationError("a"), ValidationError("b")]) + unpickled = pickle.loads(pickle.dumps(original)) + self.assertIs(unpickled.args[0][0], unpickled.error_list[0]) + self.assertEqual( + original.error_list[0].message, unpickled.error_list[0].message + ) + self.assertEqual( + original.error_list[1].message, unpickled.error_list[1].message + ) + + message_dict = {"field1": ["a", "b"], "field2": ["c", "d"]} + original = ValidationError(message_dict) + unpickled = pickle.loads(pickle.dumps(original)) + self.assertEqual(unpickled.message_dict, message_dict) diff --git a/testbed/django__django/tests/validation/test_unique.py b/testbed/django__django/tests/validation/test_unique.py new file mode 100644 index 0000000000000000000000000000000000000000..4a8b3894f03bfeee7b2c10322e1fcfbda3ddbbb6 --- /dev/null +++ b/testbed/django__django/tests/validation/test_unique.py @@ -0,0 +1,229 @@ +import datetime +import unittest + +from django.apps.registry import Apps +from django.core.exceptions import ValidationError +from django.db import models +from django.test import TestCase + +from .models import ( + CustomPKModel, + FlexibleDatePost, + ModelToValidate, + Post, + UniqueErrorsModel, + UniqueFieldsModel, + UniqueForDateModel, + UniqueFuncConstraintModel, + UniqueTogetherModel, +) + + +class GetUniqueCheckTests(unittest.TestCase): + def test_unique_fields_get_collected(self): + m = UniqueFieldsModel() + self.assertEqual( + ( + [ + (UniqueFieldsModel, ("id",)), + (UniqueFieldsModel, ("unique_charfield",)), + (UniqueFieldsModel, ("unique_integerfield",)), + ], + [], + ), + m._get_unique_checks(), + ) + + def test_unique_together_gets_picked_up_and_converted_to_tuple(self): + m = UniqueTogetherModel() + self.assertEqual( + ( + [ + (UniqueTogetherModel, ("ifield", "cfield")), + (UniqueTogetherModel, ("ifield", "efield")), + (UniqueTogetherModel, ("id",)), + ], + [], + ), 
+ m._get_unique_checks(), + ) + + def test_unique_together_normalization(self): + """ + Test the Meta.unique_together normalization with different sorts of + objects. + """ + data = { + "2-tuple": (("foo", "bar"), (("foo", "bar"),)), + "list": (["foo", "bar"], (("foo", "bar"),)), + "already normalized": ( + (("foo", "bar"), ("bar", "baz")), + (("foo", "bar"), ("bar", "baz")), + ), + "set": ( + {("foo", "bar"), ("bar", "baz")}, # Ref #21469 + (("foo", "bar"), ("bar", "baz")), + ), + } + + for unique_together, normalized in data.values(): + + class M(models.Model): + foo = models.IntegerField() + bar = models.IntegerField() + baz = models.IntegerField() + + Meta = type( + "Meta", (), {"unique_together": unique_together, "apps": Apps()} + ) + + checks, _ = M()._get_unique_checks() + for t in normalized: + check = (M, t) + self.assertIn(check, checks) + + def test_primary_key_is_considered_unique(self): + m = CustomPKModel() + self.assertEqual( + ([(CustomPKModel, ("my_pk_field",))], []), m._get_unique_checks() + ) + + def test_unique_for_date_gets_picked_up(self): + m = UniqueForDateModel() + self.assertEqual( + ( + [(UniqueForDateModel, ("id",))], + [ + (UniqueForDateModel, "date", "count", "start_date"), + (UniqueForDateModel, "year", "count", "end_date"), + (UniqueForDateModel, "month", "order", "end_date"), + ], + ), + m._get_unique_checks(), + ) + + def test_unique_for_date_exclusion(self): + m = UniqueForDateModel() + self.assertEqual( + ( + [(UniqueForDateModel, ("id",))], + [ + (UniqueForDateModel, "year", "count", "end_date"), + (UniqueForDateModel, "month", "order", "end_date"), + ], + ), + m._get_unique_checks(exclude="start_date"), + ) + + def test_func_unique_constraint_ignored(self): + m = UniqueFuncConstraintModel() + self.assertEqual( + m._get_unique_checks(), + ([(UniqueFuncConstraintModel, ("id",))], []), + ) + + +class PerformUniqueChecksTest(TestCase): + def test_primary_key_unique_check_not_performed_when_adding_and_pk_not_specified( + self, + ): 
+ # Regression test for #12560 + with self.assertNumQueries(0): + mtv = ModelToValidate(number=10, name="Some Name") + setattr(mtv, "_adding", True) + mtv.full_clean() + + def test_primary_key_unique_check_performed_when_adding_and_pk_specified(self): + # Regression test for #12560 + with self.assertNumQueries(1): + mtv = ModelToValidate(number=10, name="Some Name", id=123) + setattr(mtv, "_adding", True) + mtv.full_clean() + + def test_primary_key_unique_check_not_performed_when_not_adding(self): + # Regression test for #12132 + with self.assertNumQueries(0): + mtv = ModelToValidate(number=10, name="Some Name") + mtv.full_clean() + + def test_unique_for_date(self): + Post.objects.create( + title="Django 1.0 is released", + slug="Django 1.0", + subtitle="Finally", + posted=datetime.date(2008, 9, 3), + ) + p = Post(title="Django 1.0 is released", posted=datetime.date(2008, 9, 3)) + with self.assertRaises(ValidationError) as cm: + p.full_clean() + self.assertEqual( + cm.exception.message_dict, + {"title": ["Title must be unique for Posted date."]}, + ) + + # Should work without errors + p = Post(title="Work on Django 1.1 begins", posted=datetime.date(2008, 9, 3)) + p.full_clean() + + # Should work without errors + p = Post(title="Django 1.0 is released", posted=datetime.datetime(2008, 9, 4)) + p.full_clean() + + p = Post(slug="Django 1.0", posted=datetime.datetime(2008, 1, 1)) + with self.assertRaises(ValidationError) as cm: + p.full_clean() + self.assertEqual( + cm.exception.message_dict, + {"slug": ["Slug must be unique for Posted year."]}, + ) + + p = Post(subtitle="Finally", posted=datetime.datetime(2008, 9, 30)) + with self.assertRaises(ValidationError) as cm: + p.full_clean() + self.assertEqual( + cm.exception.message_dict, + {"subtitle": ["Subtitle must be unique for Posted month."]}, + ) + + p = Post(title="Django 1.0 is released") + with self.assertRaises(ValidationError) as cm: + p.full_clean() + self.assertEqual( + cm.exception.message_dict, {"posted": 
["This field cannot be null."]} + ) + + def test_unique_for_date_with_nullable_date(self): + """ + unique_for_date/year/month checks shouldn't trigger when the + associated DateField is None. + """ + FlexibleDatePost.objects.create( + title="Django 1.0 is released", + slug="Django 1.0", + subtitle="Finally", + posted=datetime.date(2008, 9, 3), + ) + p = FlexibleDatePost(title="Django 1.0 is released") + p.full_clean() + + p = FlexibleDatePost(slug="Django 1.0") + p.full_clean() + + p = FlexibleDatePost(subtitle="Finally") + p.full_clean() + + def test_unique_errors(self): + UniqueErrorsModel.objects.create(name="Some Name", no=10) + m = UniqueErrorsModel(name="Some Name", no=11) + with self.assertRaises(ValidationError) as cm: + m.full_clean() + self.assertEqual( + cm.exception.message_dict, {"name": ["Custom unique name message."]} + ) + + m = UniqueErrorsModel(name="Some Other Name", no=10) + with self.assertRaises(ValidationError) as cm: + m.full_clean() + self.assertEqual( + cm.exception.message_dict, {"no": ["Custom unique number message."]} + ) diff --git a/testbed/django__django/tests/validation/test_validators.py b/testbed/django__django/tests/validation/test_validators.py new file mode 100644 index 0000000000000000000000000000000000000000..0c7250f8b161586ff40b66ed165c521825f84a00 --- /dev/null +++ b/testbed/django__django/tests/validation/test_validators.py @@ -0,0 +1,43 @@ +from django.test import SimpleTestCase + +from . 
import ValidationAssertions +from .models import ModelToValidate + + +class TestModelsWithValidators(ValidationAssertions, SimpleTestCase): + def test_custom_validator_passes_for_correct_value(self): + mtv = ModelToValidate( + number=10, + name="Some Name", + f_with_custom_validator=42, + f_with_iterable_of_validators=42, + ) + self.assertIsNone(mtv.full_clean()) + + def test_custom_validator_raises_error_for_incorrect_value(self): + mtv = ModelToValidate( + number=10, + name="Some Name", + f_with_custom_validator=12, + f_with_iterable_of_validators=42, + ) + self.assertFailsValidation(mtv.full_clean, ["f_with_custom_validator"]) + self.assertFieldFailsValidationWithMessage( + mtv.full_clean, + "f_with_custom_validator", + ["This is not the answer to life, universe and everything!"], + ) + + def test_field_validators_can_be_any_iterable(self): + mtv = ModelToValidate( + number=10, + name="Some Name", + f_with_custom_validator=42, + f_with_iterable_of_validators=12, + ) + self.assertFailsValidation(mtv.full_clean, ["f_with_iterable_of_validators"]) + self.assertFieldFailsValidationWithMessage( + mtv.full_clean, + "f_with_iterable_of_validators", + ["This is not the answer to life, universe and everything!"], + ) diff --git a/testbed/django__django/tests/validation/tests.py b/testbed/django__django/tests/validation/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..43de77a44ae1e2f4082252138a582787f01682d8 --- /dev/null +++ b/testbed/django__django/tests/validation/tests.py @@ -0,0 +1,208 @@ +from django import forms +from django.core.exceptions import NON_FIELD_ERRORS +from django.test import TestCase +from django.utils.functional import lazy + +from . 
import ValidationAssertions +from .models import ( + Article, + Author, + GenericIPAddressTestModel, + GenericIPAddrUnpackUniqueTest, + ModelToValidate, +) + + +class BaseModelValidationTests(ValidationAssertions, TestCase): + def test_missing_required_field_raises_error(self): + mtv = ModelToValidate(f_with_custom_validator=42) + self.assertFailsValidation(mtv.full_clean, ["name", "number"]) + + def test_with_correct_value_model_validates(self): + mtv = ModelToValidate(number=10, name="Some Name") + self.assertIsNone(mtv.full_clean()) + + def test_custom_validate_method(self): + mtv = ModelToValidate(number=11) + self.assertFailsValidation(mtv.full_clean, [NON_FIELD_ERRORS, "name"]) + + def test_wrong_FK_value_raises_error(self): + mtv = ModelToValidate(number=10, name="Some Name", parent_id=3) + self.assertFieldFailsValidationWithMessage( + mtv.full_clean, + "parent", + ["model to validate instance with id %r does not exist." % mtv.parent_id], + ) + mtv = ModelToValidate(number=10, name="Some Name", ufm_id="Some Name") + self.assertFieldFailsValidationWithMessage( + mtv.full_clean, + "ufm", + [ + "unique fields model instance with unique_charfield %r does not exist." + % mtv.name + ], + ) + + def test_correct_FK_value_validates(self): + parent = ModelToValidate.objects.create(number=10, name="Some Name") + mtv = ModelToValidate(number=10, name="Some Name", parent_id=parent.pk) + self.assertIsNone(mtv.full_clean()) + + def test_limited_FK_raises_error(self): + # The limit_choices_to on the parent field says that a parent object's + # number attribute must be 10, so this should fail validation. + parent = ModelToValidate.objects.create(number=11, name="Other Name") + mtv = ModelToValidate(number=10, name="Some Name", parent_id=parent.pk) + self.assertFailsValidation(mtv.full_clean, ["parent"]) + + def test_FK_validates_using_base_manager(self): + # Archived articles are not available through the default manager, only + # the base manager. 
+ author = Author.objects.create(name="Randy", archived=True) + article = Article(title="My Article", author=author) + self.assertIsNone(article.full_clean()) + + def test_wrong_email_value_raises_error(self): + mtv = ModelToValidate(number=10, name="Some Name", email="not-an-email") + self.assertFailsValidation(mtv.full_clean, ["email"]) + + def test_correct_email_value_passes(self): + mtv = ModelToValidate(number=10, name="Some Name", email="valid@email.com") + self.assertIsNone(mtv.full_clean()) + + def test_wrong_url_value_raises_error(self): + mtv = ModelToValidate(number=10, name="Some Name", url="not a url") + self.assertFieldFailsValidationWithMessage( + mtv.full_clean, "url", ["Enter a valid URL."] + ) + + def test_text_greater_that_charfields_max_length_raises_errors(self): + mtv = ModelToValidate(number=10, name="Some Name" * 100) + self.assertFailsValidation(mtv.full_clean, ["name"]) + + def test_malformed_slug_raises_error(self): + mtv = ModelToValidate(number=10, name="Some Name", slug="##invalid##") + self.assertFailsValidation(mtv.full_clean, ["slug"]) + + def test_full_clean_does_not_mutate_exclude(self): + mtv = ModelToValidate(f_with_custom_validator=42) + exclude = ["number"] + self.assertFailsValidation(mtv.full_clean, ["name"], exclude=exclude) + self.assertEqual(len(exclude), 1) + self.assertEqual(exclude[0], "number") + + +class ArticleForm(forms.ModelForm): + class Meta: + model = Article + exclude = ["author"] + + +class ModelFormsTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.author = Author.objects.create(name="Joseph Kocherhans") + + def test_partial_validation(self): + # Make sure the "commit=False and set field values later" idiom still + # works with model validation. 
+ data = { + "title": "The state of model validation", + "pub_date": "2010-1-10 14:49:00", + } + form = ArticleForm(data) + self.assertEqual(list(form.errors), []) + article = form.save(commit=False) + article.author = self.author + article.save() + + def test_validation_with_empty_blank_field(self): + # Since a value for pub_date wasn't provided and the field is + # blank=True, model-validation should pass. + # Also, Article.clean() should be run, so pub_date will be filled after + # validation, so the form should save cleanly even though pub_date is + # not allowed to be null. + data = { + "title": "The state of model validation", + } + article = Article(author_id=self.author.id) + form = ArticleForm(data, instance=article) + self.assertEqual(list(form.errors), []) + self.assertIsNotNone(form.instance.pub_date) + article = form.save() + + def test_validation_with_invalid_blank_field(self): + # Even though pub_date is set to blank=True, an invalid value was + # provided, so it should fail validation. 
+ data = {"title": "The state of model validation", "pub_date": "never"} + article = Article(author_id=self.author.id) + form = ArticleForm(data, instance=article) + self.assertEqual(list(form.errors), ["pub_date"]) + + +class GenericIPAddressFieldTests(ValidationAssertions, TestCase): + def test_correct_generic_ip_passes(self): + giptm = GenericIPAddressTestModel(generic_ip="1.2.3.4") + self.assertIsNone(giptm.full_clean()) + giptm = GenericIPAddressTestModel(generic_ip=" 1.2.3.4 ") + self.assertIsNone(giptm.full_clean()) + giptm = GenericIPAddressTestModel(generic_ip="1.2.3.4\n") + self.assertIsNone(giptm.full_clean()) + giptm = GenericIPAddressTestModel(generic_ip="2001::2") + self.assertIsNone(giptm.full_clean()) + + def test_invalid_generic_ip_raises_error(self): + giptm = GenericIPAddressTestModel(generic_ip="294.4.2.1") + self.assertFailsValidation(giptm.full_clean, ["generic_ip"]) + giptm = GenericIPAddressTestModel(generic_ip="1:2") + self.assertFailsValidation(giptm.full_clean, ["generic_ip"]) + giptm = GenericIPAddressTestModel(generic_ip=1) + self.assertFailsValidation(giptm.full_clean, ["generic_ip"]) + giptm = GenericIPAddressTestModel(generic_ip=lazy(lambda: 1, int)) + self.assertFailsValidation(giptm.full_clean, ["generic_ip"]) + + def test_correct_v4_ip_passes(self): + giptm = GenericIPAddressTestModel(v4_ip="1.2.3.4") + self.assertIsNone(giptm.full_clean()) + + def test_invalid_v4_ip_raises_error(self): + giptm = GenericIPAddressTestModel(v4_ip="294.4.2.1") + self.assertFailsValidation(giptm.full_clean, ["v4_ip"]) + giptm = GenericIPAddressTestModel(v4_ip="2001::2") + self.assertFailsValidation(giptm.full_clean, ["v4_ip"]) + + def test_correct_v6_ip_passes(self): + giptm = GenericIPAddressTestModel(v6_ip="2001::2") + self.assertIsNone(giptm.full_clean()) + + def test_invalid_v6_ip_raises_error(self): + giptm = GenericIPAddressTestModel(v6_ip="1.2.3.4") + self.assertFailsValidation(giptm.full_clean, ["v6_ip"]) + giptm = 
GenericIPAddressTestModel(v6_ip="1:2") + self.assertFailsValidation(giptm.full_clean, ["v6_ip"]) + + def test_v6_uniqueness_detection(self): + # These two addresses are the same with different syntax + giptm = GenericIPAddressTestModel(generic_ip="2001::1:0:0:0:0:2") + giptm.save() + giptm = GenericIPAddressTestModel(generic_ip="2001:0:1:2") + self.assertFailsValidation(giptm.full_clean, ["generic_ip"]) + + def test_v4_unpack_uniqueness_detection(self): + # These two are different, because we are not doing IPv4 unpacking + giptm = GenericIPAddressTestModel(generic_ip="::ffff:10.10.10.10") + giptm.save() + giptm = GenericIPAddressTestModel(generic_ip="10.10.10.10") + self.assertIsNone(giptm.full_clean()) + + # These two are the same, because we are doing IPv4 unpacking + giptm = GenericIPAddrUnpackUniqueTest(generic_v4unpack_ip="::ffff:18.52.18.52") + giptm.save() + giptm = GenericIPAddrUnpackUniqueTest(generic_v4unpack_ip="18.52.18.52") + self.assertFailsValidation(giptm.full_clean, ["generic_v4unpack_ip"]) + + def test_empty_generic_ip_passes(self): + giptm = GenericIPAddressTestModel(generic_ip="") + self.assertIsNone(giptm.full_clean()) + giptm = GenericIPAddressTestModel(generic_ip=None) + self.assertIsNone(giptm.full_clean()) diff --git a/testbed/django__django/tests/validators/__init__.py b/testbed/django__django/tests/validators/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/validators/tests.py b/testbed/django__django/tests/validators/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..e689435e3c1b2d3a000042fa6cd5a16198ad2c6b --- /dev/null +++ b/testbed/django__django/tests/validators/tests.py @@ -0,0 +1,841 @@ +import re +import types +from datetime import datetime, timedelta +from decimal import Decimal +from unittest import TestCase, mock + +from django.core.exceptions import ValidationError +from 
django.core.files.base import ContentFile +from django.core.validators import ( + BaseValidator, + DecimalValidator, + EmailValidator, + FileExtensionValidator, + MaxLengthValidator, + MaxValueValidator, + MinLengthValidator, + MinValueValidator, + ProhibitNullCharactersValidator, + RegexValidator, + StepValueValidator, + URLValidator, + int_list_validator, + validate_comma_separated_integer_list, + validate_email, + validate_image_file_extension, + validate_integer, + validate_ipv4_address, + validate_ipv6_address, + validate_ipv46_address, + validate_slug, + validate_unicode_slug, +) +from django.test import SimpleTestCase + +try: + from PIL import Image # noqa +except ImportError: + PILLOW_IS_INSTALLED = False +else: + PILLOW_IS_INSTALLED = True + +NOW = datetime.now() +EXTENDED_SCHEMES = ["http", "https", "ftp", "ftps", "git", "file", "git+ssh"] + +VALID_URLS = [ + "http://www.djangoproject.com/", + "HTTP://WWW.DJANGOPROJECT.COM/", + "http://localhost/", + "http://example.com/", + "http://example.com:0", + "http://example.com:0/", + "http://example.com:65535", + "http://example.com:65535/", + "http://example.com./", + "http://www.example.com/", + "http://www.example.com:8000/test", + "http://valid-with-hyphens.com/", + "http://subdomain.example.com/", + "http://a.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "http://200.8.9.10/", + "http://200.8.9.10:8000/test", + "http://su--b.valid-----hyphens.com/", + "http://example.com?something=value", + "http://example.com/index.php?something=value&another=value2", + "https://example.com/", + "ftp://example.com/", + "ftps://example.com/", + "http://foo.com/blah_blah", + "http://foo.com/blah_blah/", + "http://foo.com/blah_blah_(wikipedia)", + "http://foo.com/blah_blah_(wikipedia)_(again)", + "http://www.example.com/wpstyle/?p=364", + "https://www.example.com/foo/?bar=baz&inga=42&quux", + "http://✪df.ws/123", + "http://userid@example.com", + "http://userid@example.com/", + "http://userid@example.com:8080", + 
"http://userid@example.com:8080/", + "http://userid@example.com:65535", + "http://userid@example.com:65535/", + "http://userid:@example.com", + "http://userid:@example.com/", + "http://userid:@example.com:8080", + "http://userid:@example.com:8080/", + "http://userid:password@example.com", + "http://userid:password@example.com/", + "http://userid:password@example.com:8", + "http://userid:password@example.com:8/", + "http://userid:password@example.com:8080", + "http://userid:password@example.com:8080/", + "http://userid:password@example.com:65535", + "http://userid:password@example.com:65535/", + "https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaassword@example.com", + "https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaassword@example.com:8080", + "https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + "ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + "ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + "dddddddddddddddddddddd:password@example.com", + "https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + "ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + "ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + "ddddddddddddddddd:password@example.com:8080", + "http://userid:password" + "d" * 2000 + "@example.aaaaaaaaaaaaa.com", + "http://142.42.1.1/", + "http://142.42.1.1:8080/", + "http://➡.ws/䨹", + "http://⌘.ws", + "http://⌘.ws/", + "http://foo.com/blah_(wikipedia)#cite-1", + 
"http://foo.com/blah_(wikipedia)_blah#cite-1", + "http://foo.com/unicode_(✪)_in_parens", + "http://foo.com/(something)?after=parens", + "http://☺.damowmow.com/", + "http://djangoproject.com/events/#&product=browser", + "http://j.mp", + "ftp://foo.bar/baz", + "http://foo.bar/?q=Test%20URL-encoded%20stuff", + "http://مثال.إختبار", + "http://例子.测试", + "http://उदाहरण.परीक्षा", + "http://-.~_!$&'()*+,;=%40:80%2f@example.com", + "http://xn--7sbb4ac0ad0be6cf.xn--p1ai", + "http://1337.net", + "http://a.b-c.de", + "http://223.255.255.254", + "ftps://foo.bar/", + "http://10.1.1.254", + "http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:80/index.html", + "http://[::192.9.5.5]/ipng", + "http://[::ffff:192.9.5.5]/ipng", + "http://[::1]:8080/", + "http://0.0.0.0/", + "http://255.255.255.255", + "http://224.0.0.0", + "http://224.1.1.1", + "http://111.112.113.114/", + "http://88.88.88.88/", + "http://11.12.13.14/", + "http://10.20.30.40/", + "http://1.2.3.4/", + "http://127.0.01.09.home.lan", + "http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.ex" + "ample.com", + "http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaa.com", + "http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "http://dashintld.c-m", + "http://multipledashintld.a-b-c", + "http://evenmoredashintld.a---c", + "http://dashinpunytld.xn---c", +] + +INVALID_URLS = [ + None, + 56, + "no_scheme", + "foo", + "http://", + "http://example", + "http://example.", + "http://example.com:-1", + "http://example.com:-1/", + "http://example.com:000000080", + "http://example.com:000000080/", + "http://.com", + "http://invalid-.com", + "http://-invalid.com", + 
"http://invalid.com-", + "http://invalid.-com", + "http://inv-.alid-.com", + "http://inv-.-alid.com", + "file://localhost/path", + "git://example.com/", + "http://.", + "http://..", + "http://../", + "http://?", + "http://??", + "http://??/", + "http://#", + "http://##", + "http://##/", + "http://foo.bar?q=Spaces should be encoded", + "//", + "//a", + "///a", + "///", + "http:///a", + "foo.com", + "rdar://1234", + "h://test", + "http:// shouldfail.com", + ":// should fail", + "http://foo.bar/foo(bar)baz quux", + "http://-error-.invalid/", + "http://dashinpunytld.trailingdot.xn--.", + "http://dashinpunytld.xn---", + "http://-a.b.co", + "http://a.b-.co", + "http://a.-b.co", + "http://a.b-.c.co", + "http:/", + "http://", + "http://", + "http://1.1.1.1.1", + "http://123.123.123", + "http://3628126748", + "http://123", + "http://000.000.000.000", + "http://016.016.016.016", + "http://192.168.000.001", + "http://01.2.3.4", + "http://01.2.3.4", + "http://1.02.3.4", + "http://1.2.03.4", + "http://1.2.3.04", + "http://.www.foo.bar/", + "http://.www.foo.bar./", + "http://[::1:2::3]:8/", + "http://[::1:2::3]:8080/", + "http://[]", + "http://[]:8080", + "http://example..com/", + "http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.e" + "xample.com", + "http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaa.com", + "http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaa", + "http://example." + ("a" * 63 + ".") * 1000 + "com", + "http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaa." 
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaa", + "https://test.[com", + "http://@example.com", + "http://:@example.com", + "http://:bar@example.com", + "http://foo@bar@example.com", + "http://foo/bar@example.com", + "http://foo:bar:baz@example.com", + "http://foo:bar@baz@example.com", + "http://foo:bar/baz@example.com", + "http://invalid-.com/?m=foo@example.com", + # Newlines and tabs are not accepted. + "http://www.djangoproject.com/\n", + "http://[::ffff:192.9.5.5]\n", + "http://www.djangoproject.com/\r", + "http://[::ffff:192.9.5.5]\r", + "http://www.django\rproject.com/", + "http://[::\rffff:192.9.5.5]", + "http://\twww.djangoproject.com/", + "http://\t[::ffff:192.9.5.5]", + # Trailing junk does not take forever to reject. + "http://www.asdasdasdasdsadfm.com.br ", + "http://www.asdasdasdasdsadfm.com.br z", +] + +TEST_DATA = [ + # (validator, value, expected), + (validate_integer, "42", None), + (validate_integer, "-42", None), + (validate_integer, -42, None), + (validate_integer, -42.5, ValidationError), + (validate_integer, None, ValidationError), + (validate_integer, "a", ValidationError), + (validate_integer, "\n42", ValidationError), + (validate_integer, "42\n", ValidationError), + (validate_email, "email@here.com", None), + (validate_email, "weirder-email@here.and.there.com", None), + (validate_email, "email@[127.0.0.1]", None), + (validate_email, "email@[2001:dB8::1]", None), + (validate_email, "email@[2001:dB8:0:0:0:0:0:1]", None), + (validate_email, "email@[::fffF:127.0.0.1]", None), + (validate_email, "example@valid-----hyphens.com", None), + (validate_email, "example@valid-with-hyphens.com", None), + (validate_email, "test@domain.with.idn.tld.उदाहरण.परीक्षा", None), + (validate_email, "email@localhost", None), + (EmailValidator(allowlist=["localdomain"]), "email@localdomain", None), + 
(validate_email, '"test@test"@example.com', None), + (validate_email, "example@atm.%s" % ("a" * 63), None), + (validate_email, "example@%s.atm" % ("a" * 63), None), + (validate_email, "example@%s.%s.atm" % ("a" * 63, "b" * 10), None), + (validate_email, "example@atm.%s" % ("a" * 64), ValidationError), + (validate_email, "example@%s.atm.%s" % ("b" * 64, "a" * 63), ValidationError), + (validate_email, "example@%scom" % (("a" * 63 + ".") * 100), ValidationError), + (validate_email, None, ValidationError), + (validate_email, "", ValidationError), + (validate_email, "abc", ValidationError), + (validate_email, "abc@", ValidationError), + (validate_email, "abc@bar", ValidationError), + (validate_email, "a @x.cz", ValidationError), + (validate_email, "abc@.com", ValidationError), + (validate_email, "something@@somewhere.com", ValidationError), + (validate_email, "email@127.0.0.1", ValidationError), + (validate_email, "email@[127.0.0.256]", ValidationError), + (validate_email, "email@[2001:db8::12345]", ValidationError), + (validate_email, "email@[2001:db8:0:0:0:0:1]", ValidationError), + (validate_email, "email@[::ffff:127.0.0.256]", ValidationError), + (validate_email, "email@[2001:dg8::1]", ValidationError), + (validate_email, "email@[2001:dG8:0:0:0:0:0:1]", ValidationError), + (validate_email, "email@[::fTzF:127.0.0.1]", ValidationError), + (validate_email, "example@invalid-.com", ValidationError), + (validate_email, "example@-invalid.com", ValidationError), + (validate_email, "example@invalid.com-", ValidationError), + (validate_email, "example@inv-.alid-.com", ValidationError), + (validate_email, "example@inv-.-alid.com", ValidationError), + (validate_email, 'test@example.com\n\n + + +

    + +

    +

    + +

    + + diff --git a/testbed/django__django/tests/view_tests/templates/jsi18n.html b/testbed/django__django/tests/view_tests/templates/jsi18n.html new file mode 100644 index 0000000000000000000000000000000000000000..40ae8e7221c5b7da75641c768765d9621575c496 --- /dev/null +++ b/testbed/django__django/tests/view_tests/templates/jsi18n.html @@ -0,0 +1,59 @@ + + + + + + +

    + +

    + +

    + +

    + +

    + +

    + +

    + +

    + +

    + + +

    + +

    + +

    + +

    + +

    + +

    + +

    + + + diff --git a/testbed/django__django/tests/view_tests/templates/my_technical_500.html b/testbed/django__django/tests/view_tests/templates/my_technical_500.html new file mode 100644 index 0000000000000000000000000000000000000000..7e8f7a900d98ecd45f585c4943c490e3b88a4944 --- /dev/null +++ b/testbed/django__django/tests/view_tests/templates/my_technical_500.html @@ -0,0 +1 @@ +

    Oh no, an error occurred!

    diff --git a/testbed/django__django/tests/view_tests/templates/my_technical_500.txt b/testbed/django__django/tests/view_tests/templates/my_technical_500.txt new file mode 100644 index 0000000000000000000000000000000000000000..e2c7727981056eca8b5a9a41d5bc9646d18c2e20 --- /dev/null +++ b/testbed/django__django/tests/view_tests/templates/my_technical_500.txt @@ -0,0 +1 @@ +Oh dear, an error occurred! diff --git a/testbed/django__django/tests/view_tests/templatetags/__init__.py b/testbed/django__django/tests/view_tests/templatetags/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/view_tests/templatetags/debugtags.py b/testbed/django__django/tests/view_tests/templatetags/debugtags.py new file mode 100644 index 0000000000000000000000000000000000000000..7f0df767a3e61218be7effebfb078a8e4e93a802 --- /dev/null +++ b/testbed/django__django/tests/view_tests/templatetags/debugtags.py @@ -0,0 +1,8 @@ +from django import template + +register = template.Library() + + +@register.simple_tag +def go_boom(): + raise Exception("boom") diff --git a/testbed/django__django/tests/view_tests/tests/__init__.py b/testbed/django__django/tests/view_tests/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/view_tests/tests/test_csrf.py b/testbed/django__django/tests/view_tests/tests/test_csrf.py new file mode 100644 index 0000000000000000000000000000000000000000..ef4a50dd45081f497acd9feda60698483854534f --- /dev/null +++ b/testbed/django__django/tests/view_tests/tests/test_csrf.py @@ -0,0 +1,133 @@ +from unittest import mock + +from django.template import TemplateDoesNotExist +from django.test import Client, RequestFactory, SimpleTestCase, override_settings +from django.utils.translation import override +from django.views.csrf import CSRF_FAILURE_TEMPLATE_NAME, 
csrf_failure + + +@override_settings(ROOT_URLCONF="view_tests.urls") +class CsrfViewTests(SimpleTestCase): + def setUp(self): + super().setUp() + self.client = Client(enforce_csrf_checks=True) + + @override_settings( + USE_I18N=True, + MIDDLEWARE=[ + "django.middleware.locale.LocaleMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + ], + ) + def test_translation(self): + """An invalid request is rejected with a localized error message.""" + response = self.client.post("/") + self.assertContains(response, "Forbidden", status_code=403) + self.assertContains( + response, "CSRF verification failed. Request aborted.", status_code=403 + ) + + with self.settings(LANGUAGE_CODE="nl"), override("en-us"): + response = self.client.post("/") + self.assertContains(response, "Verboden", status_code=403) + self.assertContains( + response, + "CSRF-verificatie mislukt. Verzoek afgebroken.", + status_code=403, + ) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https")) + def test_no_referer(self): + """ + Referer header is strictly checked for POST over HTTPS. Trigger the + exception by sending an incorrect referer. 
+ """ + response = self.client.post("/", headers={"x-forwarded-proto": "https"}) + self.assertContains( + response, + "You are seeing this message because this HTTPS site requires a " + "“Referer header” to be sent by your web browser, but " + "none was sent.", + status_code=403, + ) + self.assertContains( + response, + "If you have configured your browser to disable “Referer” " + "headers, please re-enable them, at least for this site, or for " + "HTTPS connections, or for “same-origin” requests.", + status_code=403, + ) + self.assertContains( + response, + "If you are using the <meta name="referrer" " + "content="no-referrer"> tag or including the " + "“Referrer-Policy: no-referrer” header, please remove them.", + status_code=403, + ) + + def test_no_cookies(self): + """ + The CSRF cookie is checked for POST. Failure to send this cookie should + provide a nice error message. + """ + response = self.client.post("/") + self.assertContains( + response, + "You are seeing this message because this site requires a CSRF " + "cookie when submitting forms. This cookie is required for " + "security reasons, to ensure that your browser is not being " + "hijacked by third parties.", + status_code=403, + ) + + @override_settings(TEMPLATES=[]) + def test_no_django_template_engine(self): + """ + The CSRF view doesn't depend on the TEMPLATES configuration (#24388). 
+ """ + response = self.client.post("/") + self.assertContains(response, "Forbidden", status_code=403) + + @override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "OPTIONS": { + "loaders": [ + ( + "django.template.loaders.locmem.Loader", + { + CSRF_FAILURE_TEMPLATE_NAME: ( + "Test template for CSRF failure" + ) + }, + ), + ], + }, + } + ] + ) + def test_custom_template(self): + """A custom CSRF_FAILURE_TEMPLATE_NAME is used.""" + response = self.client.post("/") + self.assertContains(response, "Test template for CSRF failure", status_code=403) + + def test_custom_template_does_not_exist(self): + """An exception is raised if a nonexistent template is supplied.""" + factory = RequestFactory() + request = factory.post("/") + with self.assertRaises(TemplateDoesNotExist): + csrf_failure(request, template_name="nonexistent.html") + + def test_template_encoding(self): + """ + The template is loaded directly, not via a template loader, and should + be opened as utf-8 charset as is the default specified on template + engines. 
+ """ + from django.views.csrf import Path + + with mock.patch.object(Path, "open") as m: + csrf_failure(mock.MagicMock(), mock.Mock()) + m.assert_called_once_with(encoding="utf-8") diff --git a/testbed/django__django/tests/view_tests/tests/test_debug.py b/testbed/django__django/tests/view_tests/tests/test_debug.py new file mode 100644 index 0000000000000000000000000000000000000000..65f9db89bfe65b7ee0d878a0ed1ebe79ce817151 --- /dev/null +++ b/testbed/django__django/tests/view_tests/tests/test_debug.py @@ -0,0 +1,2063 @@ +import importlib +import inspect +import os +import re +import sys +import tempfile +import threading +from io import StringIO +from pathlib import Path +from unittest import mock, skipIf, skipUnless + +from asgiref.sync import async_to_sync, iscoroutinefunction + +from django.core import mail +from django.core.files.uploadedfile import SimpleUploadedFile +from django.db import DatabaseError, connection +from django.http import Http404, HttpRequest, HttpResponse +from django.shortcuts import render +from django.template import TemplateDoesNotExist +from django.test import RequestFactory, SimpleTestCase, override_settings +from django.test.utils import LoggingCaptureMixin +from django.urls import path, reverse +from django.urls.converters import IntConverter +from django.utils.functional import SimpleLazyObject +from django.utils.regex_helper import _lazy_re_compile +from django.utils.safestring import mark_safe +from django.utils.version import PY311 +from django.views.debug import ( + CallableSettingWrapper, + ExceptionCycleWarning, + ExceptionReporter, +) +from django.views.debug import Path as DebugPath +from django.views.debug import ( + SafeExceptionReporterFilter, + default_urlconf, + get_default_exception_reporter_filter, + technical_404_response, + technical_500_response, +) +from django.views.decorators.debug import sensitive_post_parameters, sensitive_variables + +from ..views import ( + async_sensitive_method_view, + 
async_sensitive_method_view_nested, + async_sensitive_view, + async_sensitive_view_nested, + custom_exception_reporter_filter_view, + index_page, + multivalue_dict_key_error, + non_sensitive_view, + paranoid_view, + sensitive_args_function_caller, + sensitive_kwargs_function_caller, + sensitive_method_view, + sensitive_view, +) + + +class User: + def __str__(self): + return "jacob" + + +class WithoutEmptyPathUrls: + urlpatterns = [path("url/", index_page, name="url")] + + +class CallableSettingWrapperTests(SimpleTestCase): + """Unittests for CallableSettingWrapper""" + + def test_repr(self): + class WrappedCallable: + def __repr__(self): + return "repr from the wrapped callable" + + def __call__(self): + pass + + actual = repr(CallableSettingWrapper(WrappedCallable())) + self.assertEqual(actual, "repr from the wrapped callable") + + +@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls") +class DebugViewTests(SimpleTestCase): + def test_files(self): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/raises/") + self.assertEqual(response.status_code, 500) + + data = { + "file_data.txt": SimpleUploadedFile("file_data.txt", b"haha"), + } + with self.assertLogs("django.request", "ERROR"): + response = self.client.post("/raises/", data) + self.assertContains(response, "file_data.txt", status_code=500) + self.assertNotContains(response, "haha", status_code=500) + + def test_400(self): + # When DEBUG=True, technical_500_template() is called. + with self.assertLogs("django.security", "WARNING"): + response = self.client.get("/raises400/") + self.assertContains(response, '
    403 Forbidden", status_code=403) + + # Set up a test 403.html template. + @override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "OPTIONS": { + "loaders": [ + ( + "django.template.loaders.locmem.Loader", + { + "403.html": ( + "This is a test template for a 403 error " + "({{ exception }})." + ), + }, + ), + ], + }, + } + ] + ) + def test_403_template(self): + response = self.client.get("/raises403/") + self.assertContains(response, "test template", status_code=403) + self.assertContains(response, "(Insufficient Permissions).", status_code=403) + + def test_404(self): + response = self.client.get("/raises404/") + self.assertNotContains( + response, + '
    ',
    +            status_code=404,
    +        )
    +        self.assertContains(
    +            response,
    +            "

    The current path, not-in-urls, didn’t match any " + "of these.

    ", + status_code=404, + html=True, + ) + + def test_404_not_in_urls(self): + response = self.client.get("/not-in-urls") + self.assertNotContains(response, "Raised by:", status_code=404) + self.assertNotContains( + response, + '
    ',
    +            status_code=404,
    +        )
    +        self.assertContains(
    +            response, "Django tried these URL patterns", status_code=404
    +        )
    +        self.assertContains(
    +            response,
    +            "

    The current path, not-in-urls, didn’t match any " + "of these.

    ", + status_code=404, + html=True, + ) + # Pattern and view name of a RegexURLPattern appear. + self.assertContains( + response, r"^regex-post/(?P<pk>[0-9]+)/$", status_code=404 + ) + self.assertContains(response, "[name='regex-post']", status_code=404) + # Pattern and view name of a RoutePattern appear. + self.assertContains(response, r"path-post/<int:pk>/", status_code=404) + self.assertContains(response, "[name='path-post']", status_code=404) + + @override_settings(ROOT_URLCONF=WithoutEmptyPathUrls) + def test_404_empty_path_not_in_urls(self): + response = self.client.get("/") + self.assertContains( + response, + "

    The empty path didn’t match any of these.

    ", + status_code=404, + html=True, + ) + + def test_technical_404(self): + response = self.client.get("/technical404/") + self.assertContains( + response, + '
    Testing technical 404.
    ', + status_code=404, + html=True, + ) + self.assertContains(response, "Raised by:", status_code=404) + self.assertContains( + response, + "view_tests.views.technical404", + status_code=404, + ) + self.assertContains( + response, + "

    The current path, technical404/, matched the " + "last one.

    ", + status_code=404, + html=True, + ) + + def test_classbased_technical_404(self): + response = self.client.get("/classbased404/") + self.assertContains( + response, + "Raised by:view_tests.views.Http404View", + status_code=404, + html=True, + ) + + def test_technical_500(self): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/raises500/") + self.assertContains( + response, + "Raised during:view_tests.views.raises500", + status_code=500, + html=True, + ) + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/raises500/", headers={"accept": "text/plain"}) + self.assertContains( + response, + "Raised during: view_tests.views.raises500", + status_code=500, + ) + + def test_classbased_technical_500(self): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/classbased500/") + self.assertContains( + response, + "Raised during:view_tests.views.Raises500View", + status_code=500, + html=True, + ) + with self.assertLogs("django.request", "ERROR"): + response = self.client.get( + "/classbased500/", headers={"accept": "text/plain"} + ) + self.assertContains( + response, + "Raised during: view_tests.views.Raises500View", + status_code=500, + ) + + def test_non_l10ned_numeric_ids(self): + """ + Numeric IDs and fancy traceback context blocks line numbers shouldn't + be localized. + """ + with self.settings(DEBUG=True): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/raises500/") + # We look for a HTML fragment of the form + # '
    ', + # not '
    ', response.content + ) + self.assertIsNotNone(match) + id_repr = match["id"] + self.assertFalse( + re.search(b"[^c0-9]", id_repr), + "Numeric IDs in debug response HTML page shouldn't be localized " + "(value: %s)." % id_repr.decode(), + ) + + def test_template_exceptions(self): + with self.assertLogs("django.request", "ERROR"): + try: + self.client.get(reverse("template_exception")) + except Exception: + raising_loc = inspect.trace()[-1][-2][0].strip() + self.assertNotEqual( + raising_loc.find('raise Exception("boom")'), + -1, + "Failed to find 'raise Exception' in last frame of " + "traceback, instead found: %s" % raising_loc, + ) + + @skipIf( + sys.platform == "win32", + "Raises OSError instead of TemplateDoesNotExist on Windows.", + ) + def test_safestring_in_exception(self): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/safestring_exception/") + self.assertNotContains( + response, + "", + status_code=500, + html=True, + ) + self.assertContains( + response, + "<script>alert(1);</script>", + count=3, + status_code=500, + html=True, + ) + + def test_template_loader_postmortem(self): + """Tests for not existing file""" + template_name = "notfound.html" + with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile: + tempdir = os.path.dirname(tmpfile.name) + template_path = os.path.join(tempdir, template_name) + with override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [tempdir], + } + ] + ), self.assertLogs("django.request", "ERROR"): + response = self.client.get( + reverse( + "raises_template_does_not_exist", kwargs={"path": template_name} + ) + ) + self.assertContains( + response, + "%s (Source does not exist)" % template_path, + status_code=500, + count=2, + ) + # Assert as HTML. + self.assertContains( + response, + "
  • django.template.loaders.filesystem.Loader: " + "%s (Source does not exist)
  • " + % os.path.join(tempdir, "notfound.html"), + status_code=500, + html=True, + ) + + def test_no_template_source_loaders(self): + """ + Make sure if you don't specify a template, the debug view doesn't blow up. + """ + with self.assertLogs("django.request", "ERROR"): + with self.assertRaises(TemplateDoesNotExist): + self.client.get("/render_no_template/") + + @override_settings(ROOT_URLCONF="view_tests.default_urls") + def test_default_urlconf_template(self): + """ + Make sure that the default URLconf template is shown instead of the + technical 404 page, if the user has not altered their URLconf yet. + """ + response = self.client.get("/") + self.assertContains( + response, "

    The install worked successfully! Congratulations!

    " + ) + + @override_settings(ROOT_URLCONF="view_tests.regression_21530_urls") + def test_regression_21530(self): + """ + Regression test for bug #21530. + + If the admin app include is replaced with exactly one url + pattern, then the technical 404 template should be displayed. + + The bug here was that an AttributeError caused a 500 response. + """ + response = self.client.get("/") + self.assertContains( + response, "Page not found (404)", status_code=404 + ) + + def test_template_encoding(self): + """ + The templates are loaded directly, not via a template loader, and + should be opened as utf-8 charset as is the default specified on + template engines. + """ + with mock.patch.object(DebugPath, "open") as m: + default_urlconf(None) + m.assert_called_once_with(encoding="utf-8") + m.reset_mock() + technical_404_response(mock.MagicMock(), mock.Mock()) + m.assert_called_once_with(encoding="utf-8") + + def test_technical_404_converter_raise_404(self): + with mock.patch.object(IntConverter, "to_python", side_effect=Http404): + response = self.client.get("/path-post/1/") + self.assertContains(response, "Page not found", status_code=404) + + def test_exception_reporter_from_request(self): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/custom_reporter_class_view/") + self.assertContains(response, "custom traceback text", status_code=500) + + @override_settings( + DEFAULT_EXCEPTION_REPORTER="view_tests.views.CustomExceptionReporter" + ) + def test_exception_reporter_from_settings(self): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/raises500/") + self.assertContains(response, "custom traceback text", status_code=500) + + @override_settings( + DEFAULT_EXCEPTION_REPORTER="view_tests.views.TemplateOverrideExceptionReporter" + ) + def test_template_override_exception_reporter(self): + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/raises500/") + self.assertContains( + 
response, + "

    Oh no, an error occurred!

    ", + status_code=500, + html=True, + ) + + with self.assertLogs("django.request", "ERROR"): + response = self.client.get("/raises500/", headers={"accept": "text/plain"}) + self.assertContains(response, "Oh dear, an error occurred!", status_code=500) + + +class DebugViewQueriesAllowedTests(SimpleTestCase): + # May need a query to initialize MySQL connection + databases = {"default"} + + def test_handle_db_exception(self): + """ + Ensure the debug view works when a database exception is raised by + performing an invalid query and passing the exception to the debug view. + """ + with connection.cursor() as cursor: + try: + cursor.execute("INVALID SQL") + except DatabaseError: + exc_info = sys.exc_info() + + rf = RequestFactory() + response = technical_500_response(rf.get("/"), *exc_info) + self.assertContains(response, "OperationalError at /", status_code=500) + + +@override_settings( + DEBUG=True, + ROOT_URLCONF="view_tests.urls", + # No template directories are configured, so no templates will be found. + TEMPLATES=[ + { + "BACKEND": "django.template.backends.dummy.TemplateStrings", + } + ], +) +class NonDjangoTemplatesDebugViewTests(SimpleTestCase): + def test_400(self): + # When DEBUG=True, technical_500_template() is called. + with self.assertLogs("django.security", "WARNING"): + response = self.client.get("/raises400/") + self.assertContains(response, '
    403 Forbidden", status_code=403) + + def test_404(self): + response = self.client.get("/raises404/") + self.assertEqual(response.status_code, 404) + + def test_template_not_found_error(self): + # Raises a TemplateDoesNotExist exception and shows the debug view. + url = reverse( + "raises_template_does_not_exist", kwargs={"path": "notfound.html"} + ) + with self.assertLogs("django.request", "ERROR"): + response = self.client.get(url) + self.assertContains(response, '
    ValueError at /test_view/", html) + self.assertIn( + '
    Can't find my keys
    ', html + ) + self.assertIn("Request Method:", html) + self.assertIn("Request URL:", html) + self.assertIn('

    USER

    ', html) + self.assertIn("

    jacob

    ", html) + self.assertIn("Exception Type:", html) + self.assertIn("Exception Value:", html) + self.assertIn("

    Traceback ", html) + self.assertIn("

    Request information

    ", html) + self.assertNotIn("

    Request data not supplied

    ", html) + self.assertIn("

    No POST data

    ", html) + + def test_no_request(self): + "An exception report can be generated without request" + try: + raise ValueError("Can't find my keys") + except ValueError: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertInHTML("

    ValueError

    ", html) + self.assertIn( + '
    Can't find my keys
    ', html + ) + self.assertNotIn("Request Method:", html) + self.assertNotIn("Request URL:", html) + self.assertNotIn('

    USER

    ', html) + self.assertIn("Exception Type:", html) + self.assertIn("Exception Value:", html) + self.assertIn("

    Traceback ", html) + self.assertIn("

    Request information

    ", html) + self.assertIn("

    Request data not supplied

    ", html) + + def test_sharing_traceback(self): + try: + raise ValueError("Oops") + except ValueError: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn( + '
    ', + html, + ) + + def test_eol_support(self): + """The ExceptionReporter supports Unix, Windows and Macintosh EOL markers""" + LINES = ["print %d" % i for i in range(1, 6)] + reporter = ExceptionReporter(None, None, None, None) + + for newline in ["\n", "\r\n", "\r"]: + fd, filename = tempfile.mkstemp(text=False) + os.write(fd, (newline.join(LINES) + newline).encode()) + os.close(fd) + + try: + self.assertEqual( + reporter._get_lines_from_file(filename, 3, 2), + (1, LINES[1:3], LINES[3], LINES[4:]), + ) + finally: + os.unlink(filename) + + def test_no_exception(self): + "An exception report can be generated for just a request" + request = self.rf.get("/test_view/") + reporter = ExceptionReporter(request, None, None, None) + html = reporter.get_traceback_html() + self.assertInHTML("

    Report at /test_view/

    ", html) + self.assertIn( + '
    No exception message supplied
    ', html + ) + self.assertIn("Request Method:", html) + self.assertIn("Request URL:", html) + self.assertNotIn("Exception Type:", html) + self.assertNotIn("Exception Value:", html) + self.assertNotIn("

    Traceback ", html) + self.assertIn("

    Request information

    ", html) + self.assertNotIn("

    Request data not supplied

    ", html) + + def test_suppressed_context(self): + try: + try: + raise RuntimeError("Can't find my keys") + except RuntimeError: + raise ValueError("Can't find my keys") from None + except ValueError: + exc_type, exc_value, tb = sys.exc_info() + + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertInHTML("

    ValueError

    ", html) + self.assertIn( + '
    Can't find my keys
    ', html + ) + self.assertIn("Exception Type:", html) + self.assertIn("Exception Value:", html) + self.assertIn("

    Traceback ", html) + self.assertIn("

    Request information

    ", html) + self.assertIn("

    Request data not supplied

    ", html) + self.assertNotIn("During handling of the above exception", html) + + def test_innermost_exception_without_traceback(self): + try: + try: + raise RuntimeError("Oops") + except Exception as exc: + new_exc = RuntimeError("My context") + exc.__context__ = new_exc + raise + except Exception: + exc_type, exc_value, tb = sys.exc_info() + + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + frames = reporter.get_traceback_frames() + self.assertEqual(len(frames), 2) + html = reporter.get_traceback_html() + self.assertInHTML("

    RuntimeError

    ", html) + self.assertIn('
    Oops
    ', html) + self.assertIn("Exception Type:", html) + self.assertIn("Exception Value:", html) + self.assertIn("

    Traceback ", html) + self.assertIn("

    Request information

    ", html) + self.assertIn("

    Request data not supplied

    ", html) + self.assertIn( + "During handling of the above exception (My context), another " + "exception occurred", + html, + ) + self.assertInHTML('
  • None
  • ', html) + self.assertIn("Traceback (most recent call last):\n None", html) + + text = reporter.get_traceback_text() + self.assertIn("Exception Type: RuntimeError", text) + self.assertIn("Exception Value: Oops", text) + self.assertIn("Traceback (most recent call last):\n None", text) + self.assertIn( + "During handling of the above exception (My context), another " + "exception occurred", + text, + ) + + @skipUnless(PY311, "Exception notes were added in Python 3.11.") + def test_exception_with_notes(self): + request = self.rf.get("/test_view/") + try: + try: + raise RuntimeError("Oops") + except Exception as err: + err.add_note("First Note") + err.add_note("Second Note") + err.add_note(mark_safe("")) + raise err + except Exception: + exc_type, exc_value, tb = sys.exc_info() + + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn( + '
    Oops\nFirst Note\nSecond Note\n'
    +            "<script>alert(1);</script>
    ", + html, + ) + self.assertIn( + "Exception Value: Oops\nFirst Note\nSecond Note\n" + "<script>alert(1);</script>", + html, + ) + + text = reporter.get_traceback_text() + self.assertIn( + "Exception Value: Oops\nFirst Note\nSecond Note\n" + "", + text, + ) + + def test_mid_stack_exception_without_traceback(self): + try: + try: + raise RuntimeError("Inner Oops") + except Exception as exc: + new_exc = RuntimeError("My context") + new_exc.__context__ = exc + raise RuntimeError("Oops") from new_exc + except Exception: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertInHTML("

    RuntimeError

    ", html) + self.assertIn('
    Oops
    ', html) + self.assertIn("Exception Type:", html) + self.assertIn("Exception Value:", html) + self.assertIn("

    Traceback ", html) + self.assertInHTML('
  • Traceback: None
  • ', html) + self.assertIn( + "During handling of the above exception (Inner Oops), another " + "exception occurred:\n Traceback: None", + html, + ) + + text = reporter.get_traceback_text() + self.assertIn("Exception Type: RuntimeError", text) + self.assertIn("Exception Value: Oops", text) + self.assertIn("Traceback (most recent call last):", text) + self.assertIn( + "During handling of the above exception (Inner Oops), another " + "exception occurred:\n Traceback: None", + text, + ) + + def test_reporting_of_nested_exceptions(self): + request = self.rf.get("/test_view/") + try: + try: + raise AttributeError(mark_safe("

    Top level

    ")) + except AttributeError as explicit: + try: + raise ValueError(mark_safe("

    Second exception

    ")) from explicit + except ValueError: + raise IndexError(mark_safe("

    Final exception

    ")) + except Exception: + # Custom exception handler, just pass it into ExceptionReporter + exc_type, exc_value, tb = sys.exc_info() + + explicit_exc = ( + "The above exception ({0}) was the direct cause of the following exception:" + ) + implicit_exc = ( + "During handling of the above exception ({0}), another exception occurred:" + ) + + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + # Both messages are twice on page -- one rendered as html, + # one as plain text (for pastebin) + self.assertEqual( + 2, html.count(explicit_exc.format("<p>Top level</p>")) + ) + self.assertEqual( + 2, html.count(implicit_exc.format("<p>Second exception</p>")) + ) + self.assertEqual(10, html.count("<p>Final exception</p>")) + + text = reporter.get_traceback_text() + self.assertIn(explicit_exc.format("

    Top level

    "), text) + self.assertIn(implicit_exc.format("

    Second exception

    "), text) + self.assertEqual(3, text.count("

    Final exception

    ")) + + @skipIf( + sys._xoptions.get("no_debug_ranges", False) + or os.environ.get("PYTHONNODEBUGRANGES", False), + "Fine-grained error locations are disabled.", + ) + @skipUnless(PY311, "Fine-grained error locations were added in Python 3.11.") + def test_highlight_error_position(self): + request = self.rf.get("/test_view/") + try: + try: + raise AttributeError("Top level") + except AttributeError as explicit: + try: + raise ValueError(mark_safe("

    2nd exception

    ")) from explicit + except ValueError: + raise IndexError("Final exception") + except Exception: + exc_type, exc_value, tb = sys.exc_info() + + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn( + "
                    raise AttributeError("Top level")\n"
    +            "                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ", + html, + ) + self.assertIn( + "
                        raise ValueError(mark_safe("
    +            ""<p>2nd exception</p>")) from explicit\n"
    +            "                         "
    +            "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ", + html, + ) + self.assertIn( + "
                        raise IndexError("Final exception")\n"
    +            "                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ", + html, + ) + # Pastebin. + self.assertIn( + " raise AttributeError("Top level")\n" + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + html, + ) + self.assertIn( + " raise ValueError(mark_safe(" + ""<p>2nd exception</p>")) from explicit\n" + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + html, + ) + self.assertIn( + " raise IndexError("Final exception")\n" + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + html, + ) + # Text traceback. + text = reporter.get_traceback_text() + self.assertIn( + ' raise AttributeError("Top level")\n' + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + text, + ) + self.assertIn( + ' raise ValueError(mark_safe("

    2nd exception

    ")) from explicit\n' + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + text, + ) + self.assertIn( + ' raise IndexError("Final exception")\n' + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + text, + ) + + def test_reporting_frames_without_source(self): + try: + source = "def funcName():\n raise Error('Whoops')\nfuncName()" + namespace = {} + code = compile(source, "generated", "exec") + exec(code, namespace) + except Exception: + exc_type, exc_value, tb = sys.exc_info() + request = self.rf.get("/test_view/") + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + frames = reporter.get_traceback_frames() + last_frame = frames[-1] + self.assertEqual(last_frame["context_line"], "") + self.assertEqual(last_frame["filename"], "generated") + self.assertEqual(last_frame["function"], "funcName") + self.assertEqual(last_frame["lineno"], 2) + html = reporter.get_traceback_html() + self.assertIn( + 'generated, line 2, in funcName', + html, + ) + self.assertIn( + 'generated, line 2, in funcName', + html, + ) + self.assertIn( + '"generated", line 2, in funcName\n <source code not available>', + html, + ) + text = reporter.get_traceback_text() + self.assertIn( + '"generated", line 2, in funcName\n ', + text, + ) + + def test_reporting_frames_source_not_match(self): + try: + source = "def funcName():\n raise Error('Whoops')\nfuncName()" + namespace = {} + code = compile(source, "generated", "exec") + exec(code, namespace) + except Exception: + exc_type, exc_value, tb = sys.exc_info() + with mock.patch( + "django.views.debug.ExceptionReporter._get_source", + return_value=["wrong source"], + ): + request = self.rf.get("/test_view/") + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + frames = reporter.get_traceback_frames() + last_frame = frames[-1] + self.assertEqual(last_frame["context_line"], "") + self.assertEqual(last_frame["filename"], "generated") + self.assertEqual(last_frame["function"], "funcName") + 
self.assertEqual(last_frame["lineno"], 2) + html = reporter.get_traceback_html() + self.assertIn( + 'generated, line 2, in funcName', + html, + ) + self.assertIn( + 'generated, line 2, in funcName', + html, + ) + self.assertIn( + '"generated", line 2, in funcName\n' + " <source code not available>", + html, + ) + text = reporter.get_traceback_text() + self.assertIn( + '"generated", line 2, in funcName\n ', + text, + ) + + def test_reporting_frames_for_cyclic_reference(self): + try: + + def test_func(): + try: + raise RuntimeError("outer") from RuntimeError("inner") + except RuntimeError as exc: + raise exc.__cause__ + + test_func() + except Exception: + exc_type, exc_value, tb = sys.exc_info() + request = self.rf.get("/test_view/") + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + + def generate_traceback_frames(*args, **kwargs): + nonlocal tb_frames + tb_frames = reporter.get_traceback_frames() + + tb_frames = None + tb_generator = threading.Thread(target=generate_traceback_frames, daemon=True) + msg = ( + "Cycle in the exception chain detected: exception 'inner' " + "encountered again." + ) + with self.assertWarnsMessage(ExceptionCycleWarning, msg): + tb_generator.start() + tb_generator.join(timeout=5) + if tb_generator.is_alive(): + # tb_generator is a daemon that runs until the main thread/process + # exits. This is resource heavy when running the full test suite. + # Setting the following values to None makes + # reporter.get_traceback_frames() exit early. 
+ exc_value.__traceback__ = exc_value.__context__ = exc_value.__cause__ = None + tb_generator.join() + self.fail("Cyclic reference in Exception Reporter.get_traceback_frames()") + if tb_frames is None: + # can happen if the thread generating traceback got killed + # or exception while generating the traceback + self.fail("Traceback generation failed") + last_frame = tb_frames[-1] + self.assertIn("raise exc.__cause__", last_frame["context_line"]) + self.assertEqual(last_frame["filename"], __file__) + self.assertEqual(last_frame["function"], "test_func") + + def test_request_and_message(self): + "A message can be provided in addition to a request" + request = self.rf.get("/test_view/") + reporter = ExceptionReporter(request, None, "I'm a little teapot", None) + html = reporter.get_traceback_html() + self.assertInHTML("

    Report at /test_view/

    ", html) + self.assertIn( + '
    I'm a little teapot
    ', html + ) + self.assertIn("Request Method:", html) + self.assertIn("Request URL:", html) + self.assertNotIn("Exception Type:", html) + self.assertNotIn("Exception Value:", html) + self.assertIn("

    Traceback ", html) + self.assertIn("

    Request information

    ", html) + self.assertNotIn("

    Request data not supplied

    ", html) + + def test_message_only(self): + reporter = ExceptionReporter(None, None, "I'm a little teapot", None) + html = reporter.get_traceback_html() + self.assertInHTML("

    Report

    ", html) + self.assertIn( + '
    I'm a little teapot
    ', html + ) + self.assertNotIn("Request Method:", html) + self.assertNotIn("Request URL:", html) + self.assertNotIn("Exception Type:", html) + self.assertNotIn("Exception Value:", html) + self.assertIn("

    Traceback ", html) + self.assertIn("

    Request information

    ", html) + self.assertIn("

    Request data not supplied

    ", html) + + def test_non_utf8_values_handling(self): + "Non-UTF-8 exceptions/values should not make the output generation choke." + try: + + class NonUtf8Output(Exception): + def __repr__(self): + return b"EXC\xe9EXC" + + somevar = b"VAL\xe9VAL" # NOQA + raise NonUtf8Output() + except Exception: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn("VAL\\xe9VAL", html) + self.assertIn("EXC\\xe9EXC", html) + + def test_local_variable_escaping(self): + """Safe strings in local variables are escaped.""" + try: + local = mark_safe("

    Local variable

    ") + raise ValueError(local) + except Exception: + exc_type, exc_value, tb = sys.exc_info() + html = ExceptionReporter(None, exc_type, exc_value, tb).get_traceback_html() + self.assertIn( + '
    '<p>Local variable</p>'
    ' + "", + html, + ) + + def test_unprintable_values_handling(self): + "Unprintable values should not make the output generation choke." + try: + + class OomOutput: + def __repr__(self): + raise MemoryError("OOM") + + oomvalue = OomOutput() # NOQA + raise ValueError() + except Exception: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn('
    Error in formatting', html)
    +
    +    def test_too_large_values_handling(self):
    +        "Large values should not create a large HTML."
    +        large = 256 * 1024
    +        repr_of_str_adds = len(repr(""))
    +        try:
    +
    +            class LargeOutput:
    +                def __repr__(self):
    +                    return repr("A" * large)
    +
    +            largevalue = LargeOutput()  # NOQA
    +            raise ValueError()
    +        except Exception:
    +            exc_type, exc_value, tb = sys.exc_info()
    +        reporter = ExceptionReporter(None, exc_type, exc_value, tb)
    +        html = reporter.get_traceback_html()
    +        self.assertEqual(len(html) // 1024 // 128, 0)  # still fit in 128Kb
    +        self.assertIn(
    +            "<trimmed %d bytes string>" % (large + repr_of_str_adds,), html
    +        )
    +
    +    def test_encoding_error(self):
    +        """
    +        A UnicodeError displays a portion of the problematic string. HTML in
    +        safe strings is escaped.
    +        """
    +        try:
    +            mark_safe("abcdefghijkl

    mnὀp

    qrstuwxyz").encode("ascii") + except Exception: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn("

    Unicode error hint

    ", html) + self.assertIn("The string that could not be encoded/decoded was: ", html) + self.assertIn("<p>mnὀp</p>", html) + + def test_unfrozen_importlib(self): + """ + importlib is not a frozen app, but its loader thinks it's frozen which + results in an ImportError. Refs #21443. + """ + try: + request = self.rf.get("/test_view/") + importlib.import_module("abc.def.invalid.name") + except Exception: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertInHTML("

    ModuleNotFoundError at /test_view/

    ", html) + + def test_ignore_traceback_evaluation_exceptions(self): + """ + Don't trip over exceptions generated by crafted objects when + evaluating them while cleansing (#24455). + """ + + class BrokenEvaluation(Exception): + pass + + def broken_setup(): + raise BrokenEvaluation + + request = self.rf.get("/test_view/") + broken_lazy = SimpleLazyObject(broken_setup) + try: + bool(broken_lazy) + except BrokenEvaluation: + exc_type, exc_value, tb = sys.exc_info() + + self.assertIn( + "BrokenEvaluation", + ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(), + "Evaluation exception reason not mentioned in traceback", + ) + + @override_settings(ALLOWED_HOSTS="example.com") + def test_disallowed_host(self): + "An exception report can be generated even for a disallowed host." + request = self.rf.get("/", headers={"host": "evil.com"}) + reporter = ExceptionReporter(request, None, None, None) + html = reporter.get_traceback_html() + self.assertIn("http://evil.com/", html) + + def test_request_with_items_key(self): + """ + An exception report can be generated for requests with 'items' in + request GET, POST, FILES, or COOKIES QueryDicts. + """ + value = 'items
    'Oops'
    ' + # GET + request = self.rf.get("/test_view/?items=Oops") + reporter = ExceptionReporter(request, None, None, None) + html = reporter.get_traceback_html() + self.assertInHTML(value, html) + # POST + request = self.rf.post("/test_view/", data={"items": "Oops"}) + reporter = ExceptionReporter(request, None, None, None) + html = reporter.get_traceback_html() + self.assertInHTML(value, html) + # FILES + fp = StringIO("filecontent") + request = self.rf.post("/test_view/", data={"name": "filename", "items": fp}) + reporter = ExceptionReporter(request, None, None, None) + html = reporter.get_traceback_html() + self.assertInHTML( + 'items
    <InMemoryUploadedFile: '
    +            "items (application/octet-stream)>
    ", + html, + ) + # COOKIES + rf = RequestFactory() + rf.cookies["items"] = "Oops" + request = rf.get("/test_view/") + reporter = ExceptionReporter(request, None, None, None) + html = reporter.get_traceback_html() + self.assertInHTML( + 'items
    'Oops'
    ', html + ) + + def test_exception_fetching_user(self): + """ + The error page can be rendered if the current user can't be retrieved + (such as when the database is unavailable). + """ + + class ExceptionUser: + def __str__(self): + raise Exception() + + request = self.rf.get("/test_view/") + request.user = ExceptionUser() + + try: + raise ValueError("Oops") + except ValueError: + exc_type, exc_value, tb = sys.exc_info() + + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertInHTML("

    ValueError at /test_view/

    ", html) + self.assertIn('
    Oops
    ', html) + self.assertIn('

    USER

    ', html) + self.assertIn("

    [unable to retrieve the current user]

    ", html) + + text = reporter.get_traceback_text() + self.assertIn("USER: [unable to retrieve the current user]", text) + + def test_template_encoding(self): + """ + The templates are loaded directly, not via a template loader, and + should be opened as utf-8 charset as is the default specified on + template engines. + """ + reporter = ExceptionReporter(None, None, None, None) + with mock.patch.object(DebugPath, "open") as m: + reporter.get_traceback_html() + m.assert_called_once_with(encoding="utf-8") + m.reset_mock() + reporter.get_traceback_text() + m.assert_called_once_with(encoding="utf-8") + + @override_settings(ALLOWED_HOSTS=["example.com"]) + def test_get_raw_insecure_uri(self): + factory = RequestFactory(headers={"host": "evil.com"}) + tests = [ + ("////absolute-uri", "http://evil.com//absolute-uri"), + ("/?foo=bar", "http://evil.com/?foo=bar"), + ("/path/with:colons", "http://evil.com/path/with:colons"), + ] + for url, expected in tests: + with self.subTest(url=url): + request = factory.get(url) + reporter = ExceptionReporter(request, None, None, None) + self.assertEqual(reporter._get_raw_insecure_uri(), expected) + + +class PlainTextReportTests(SimpleTestCase): + rf = RequestFactory() + + def test_request_and_exception(self): + "A simple exception report can be generated" + try: + request = self.rf.get("/test_view/") + request.user = User() + raise ValueError("Can't find my keys") + except ValueError: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + text = reporter.get_traceback_text() + self.assertIn("ValueError at /test_view/", text) + self.assertIn("Can't find my keys", text) + self.assertIn("Request Method:", text) + self.assertIn("Request URL:", text) + self.assertIn("USER: jacob", text) + self.assertIn("Exception Type:", text) + self.assertIn("Exception Value:", text) + self.assertIn("Traceback (most recent call last):", text) + self.assertIn("Request information:", text) + 
self.assertNotIn("Request data not supplied", text) + + def test_no_request(self): + "An exception report can be generated without request" + try: + raise ValueError("Can't find my keys") + except ValueError: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(None, exc_type, exc_value, tb) + text = reporter.get_traceback_text() + self.assertIn("ValueError", text) + self.assertIn("Can't find my keys", text) + self.assertNotIn("Request Method:", text) + self.assertNotIn("Request URL:", text) + self.assertNotIn("USER:", text) + self.assertIn("Exception Type:", text) + self.assertIn("Exception Value:", text) + self.assertIn("Traceback (most recent call last):", text) + self.assertIn("Request data not supplied", text) + + def test_no_exception(self): + "An exception report can be generated for just a request" + request = self.rf.get("/test_view/") + reporter = ExceptionReporter(request, None, None, None) + reporter.get_traceback_text() + + def test_request_and_message(self): + "A message can be provided in addition to a request" + request = self.rf.get("/test_view/") + reporter = ExceptionReporter(request, None, "I'm a little teapot", None) + reporter.get_traceback_text() + + @override_settings(DEBUG=True) + def test_template_exception(self): + request = self.rf.get("/test_view/") + try: + render(request, "debug/template_error.html") + except Exception: + exc_type, exc_value, tb = sys.exc_info() + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + text = reporter.get_traceback_text() + templ_path = Path( + Path(__file__).parents[1], "templates", "debug", "template_error.html" + ) + self.assertIn( + "Template error:\n" + "In template %(path)s, error at line 2\n" + " 'cycle' tag requires at least two arguments\n" + " 1 : Template with error:\n" + " 2 : {%% cycle %%} \n" + " 3 : " % {"path": templ_path}, + text, + ) + + def test_request_with_items_key(self): + """ + An exception report can be generated for requests with 'items' in + 
request GET, POST, FILES, or COOKIES QueryDicts. + """ + # GET + request = self.rf.get("/test_view/?items=Oops") + reporter = ExceptionReporter(request, None, None, None) + text = reporter.get_traceback_text() + self.assertIn("items = 'Oops'", text) + # POST + request = self.rf.post("/test_view/", data={"items": "Oops"}) + reporter = ExceptionReporter(request, None, None, None) + text = reporter.get_traceback_text() + self.assertIn("items = 'Oops'", text) + # FILES + fp = StringIO("filecontent") + request = self.rf.post("/test_view/", data={"name": "filename", "items": fp}) + reporter = ExceptionReporter(request, None, None, None) + text = reporter.get_traceback_text() + self.assertIn("items = Not Found", response.content) + self.assertIn( + b"

    The requested resource was not found on this server.

    ", + response.content, + ) + + @override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "OPTIONS": { + "loaders": [ + ( + "django.template.loaders.locmem.Loader", + { + "404.html": "{{ csrf_token }}", + }, + ), + ], + }, + } + ] + ) + def test_csrf_token_in_404(self): + """ + The 404 page should have the csrf_token available in the context + """ + # See ticket #14565 + for url in self.nonexistent_urls: + response = self.client.get(url) + self.assertNotEqual(response.content, b"NOTPROVIDED") + self.assertNotEqual(response.content, b"") + + def test_server_error(self): + "The server_error view raises a 500 status" + response = self.client.get("/server_error/") + self.assertContains(response, b"

    Server Error (500)

    ", status_code=500) + + def test_bad_request(self): + request = self.request_factory.get("/") + response = bad_request(request, Exception()) + self.assertContains(response, b"

    Bad Request (400)

    ", status_code=400) + + @override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "OPTIONS": { + "loaders": [ + ( + "django.template.loaders.locmem.Loader", + { + "404.html": ( + "This is a test template for a 404 error " + "(path: {{ request_path }}, " + "exception: {{ exception }})." + ), + "500.html": "This is a test template for a 500 error.", + }, + ), + ], + }, + } + ] + ) + def test_custom_templates(self): + """ + 404.html and 500.html templates are picked by their respective handler. + """ + response = self.client.get("/server_error/") + self.assertContains(response, "test template for a 500 error", status_code=500) + response = self.client.get("/no_such_url/") + self.assertContains(response, "path: /no_such_url/", status_code=404) + self.assertContains(response, "exception: Resolver404", status_code=404) + response = self.client.get("/technical404/") + self.assertContains( + response, "exception: Testing technical 404.", status_code=404 + ) + + def test_get_absolute_url_attributes(self): + "A model can set attributes on the get_absolute_url method" + self.assertTrue( + getattr(UrlArticle.get_absolute_url, "purge", False), + "The attributes of the original get_absolute_url must be added.", + ) + article = UrlArticle.objects.get(pk=self.urlarticle.pk) + self.assertTrue( + getattr(article.get_absolute_url, "purge", False), + "The attributes of the original get_absolute_url must be added.", + ) + + def test_custom_templates_wrong(self): + """ + Default error views should raise TemplateDoesNotExist when passed a + template that doesn't exist. 
+ """ + request = self.request_factory.get("/") + + with self.assertRaises(TemplateDoesNotExist): + bad_request(request, Exception(), template_name="nonexistent") + + with self.assertRaises(TemplateDoesNotExist): + permission_denied(request, Exception(), template_name="nonexistent") + + with self.assertRaises(TemplateDoesNotExist): + page_not_found(request, Http404(), template_name="nonexistent") + + with self.assertRaises(TemplateDoesNotExist): + server_error(request, template_name="nonexistent") + + def test_error_pages(self): + request = self.request_factory.get("/") + for response, title in ( + (bad_request(request, Exception()), b"Bad Request (400)"), + (permission_denied(request, Exception()), b"403 Forbidden"), + (page_not_found(request, Http404()), b"Not Found"), + (server_error(request), b"Server Error (500)"), + ): + with self.subTest(title=title): + self.assertIn(b"", response.content) + self.assertIn(b'', response.content) + self.assertIn(b"", response.content) + self.assertIn(b"%s" % title, response.content) + self.assertIn(b"", response.content) diff --git a/testbed/django__django/tests/view_tests/tests/test_i18n.py b/testbed/django__django/tests/view_tests/tests/test_i18n.py new file mode 100644 index 0000000000000000000000000000000000000000..93e91bcc83e33ae7b33ffbb03297d441364225aa --- /dev/null +++ b/testbed/django__django/tests/view_tests/tests/test_i18n.py @@ -0,0 +1,575 @@ +import gettext +import json +from os import path +from unittest import mock + +from django.conf import settings +from django.test import ( + RequestFactory, + SimpleTestCase, + TestCase, + modify_settings, + override_settings, +) +from django.test.selenium import SeleniumTestCase +from django.urls import reverse +from django.utils.translation import get_language, override +from django.views.i18n import JavaScriptCatalog, get_formats + +from ..urls import locale_dir + + +@override_settings(ROOT_URLCONF="view_tests.urls") +class SetLanguageTests(TestCase): + """Test the 
django.views.i18n.set_language view.""" + + def _get_inactive_language_code(self): + """Return language code for a language which is not activated.""" + current_language = get_language() + return [code for code, name in settings.LANGUAGES if code != current_language][ + 0 + ] + + def test_setlang(self): + """ + The set_language view can be used to change the session language. + + The user is redirected to the 'next' argument if provided. + """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code, "next": "/"} + response = self.client.post( + "/i18n/setlang/", post_data, headers={"referer": "/i_should_not_be_used/"} + ) + self.assertRedirects(response, "/") + # The language is set in a cookie. + language_cookie = self.client.cookies[settings.LANGUAGE_COOKIE_NAME] + self.assertEqual(language_cookie.value, lang_code) + self.assertEqual(language_cookie["domain"], "") + self.assertEqual(language_cookie["path"], "/") + self.assertEqual(language_cookie["max-age"], "") + self.assertEqual(language_cookie["httponly"], "") + self.assertEqual(language_cookie["samesite"], "") + self.assertEqual(language_cookie["secure"], "") + + def test_setlang_unsafe_next(self): + """ + The set_language view only redirects to the 'next' argument if it is + "safe". + """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code, "next": "//unsafe/redirection/"} + response = self.client.post("/i18n/setlang/", data=post_data) + self.assertEqual(response.url, "/") + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_http_next(self): + """ + The set_language view only redirects to the 'next' argument if it is + "safe" and its scheme is HTTPS if the request was sent over HTTPS. 
+ """ + lang_code = self._get_inactive_language_code() + non_https_next_url = "http://testserver/redirection/" + post_data = {"language": lang_code, "next": non_https_next_url} + # Insecure URL in POST data. + response = self.client.post("/i18n/setlang/", data=post_data, secure=True) + self.assertEqual(response.url, "/") + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + # Insecure URL in HTTP referer. + response = self.client.post( + "/i18n/setlang/", secure=True, headers={"referer": non_https_next_url} + ) + self.assertEqual(response.url, "/") + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_redirect_to_referer(self): + """ + The set_language view redirects to the URL in the referer header when + there isn't a "next" parameter. + """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code} + response = self.client.post( + "/i18n/setlang/", post_data, headers={"referer": "/i18n/"} + ) + self.assertRedirects(response, "/i18n/", fetch_redirect_response=False) + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_default_redirect(self): + """ + The set_language view redirects to '/' when there isn't a referer or + "next" parameter. + """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code} + response = self.client.post("/i18n/setlang/", post_data) + self.assertRedirects(response, "/") + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_performs_redirect_for_ajax_if_explicitly_requested(self): + """ + The set_language view redirects to the "next" parameter for requests + not accepting HTML response content. 
+ """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code, "next": "/"} + response = self.client.post( + "/i18n/setlang/", post_data, headers={"accept": "application/json"} + ) + self.assertRedirects(response, "/") + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_doesnt_perform_a_redirect_to_referer_for_ajax(self): + """ + The set_language view doesn't redirect to the HTTP referer header if + the request doesn't accept HTML response content. + """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code} + headers = {"HTTP_REFERER": "/", "HTTP_ACCEPT": "application/json"} + response = self.client.post("/i18n/setlang/", post_data, **headers) + self.assertEqual(response.status_code, 204) + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_doesnt_perform_a_default_redirect_for_ajax(self): + """ + The set_language view returns 204 by default for requests not accepting + HTML response content. + """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code} + response = self.client.post( + "/i18n/setlang/", post_data, headers={"accept": "application/json"} + ) + self.assertEqual(response.status_code, 204) + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_unsafe_next_for_ajax(self): + """ + The fallback to root URL for the set_language view works for requests + not accepting HTML response content. 
+ """ + lang_code = self._get_inactive_language_code() + post_data = {"language": lang_code, "next": "//unsafe/redirection/"} + response = self.client.post( + "/i18n/setlang/", post_data, headers={"accept": "application/json"} + ) + self.assertEqual(response.url, "/") + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + def test_setlang_reversal(self): + self.assertEqual(reverse("set_language"), "/i18n/setlang/") + + def test_setlang_cookie(self): + # we force saving language to a cookie rather than a session + # by excluding session middleware and those which do require it + test_settings = { + "MIDDLEWARE": ["django.middleware.common.CommonMiddleware"], + "LANGUAGE_COOKIE_NAME": "mylanguage", + "LANGUAGE_COOKIE_AGE": 3600 * 7 * 2, + "LANGUAGE_COOKIE_DOMAIN": ".example.com", + "LANGUAGE_COOKIE_PATH": "/test/", + "LANGUAGE_COOKIE_HTTPONLY": True, + "LANGUAGE_COOKIE_SAMESITE": "Strict", + "LANGUAGE_COOKIE_SECURE": True, + } + with self.settings(**test_settings): + post_data = {"language": "pl", "next": "/views/"} + response = self.client.post("/i18n/setlang/", data=post_data) + language_cookie = response.cookies.get("mylanguage") + self.assertEqual(language_cookie.value, "pl") + self.assertEqual(language_cookie["domain"], ".example.com") + self.assertEqual(language_cookie["path"], "/test/") + self.assertEqual(language_cookie["max-age"], 3600 * 7 * 2) + self.assertIs(language_cookie["httponly"], True) + self.assertEqual(language_cookie["samesite"], "Strict") + self.assertIs(language_cookie["secure"], True) + + def test_setlang_decodes_http_referer_url(self): + """ + The set_language view decodes the HTTP_REFERER URL and preserves an + encoded query string. + """ + # The URL & view must exist for this to work as a regression test. + self.assertEqual( + reverse("with_parameter", kwargs={"parameter": "x"}), "/test-setlang/x/" + ) + lang_code = self._get_inactive_language_code() + # %C3%A4 decodes to ä, %26 to &. 
+ encoded_url = "/test-setlang/%C3%A4/?foo=bar&baz=alpha%26omega" + response = self.client.post( + "/i18n/setlang/", {"language": lang_code}, headers={"referer": encoded_url} + ) + self.assertRedirects(response, encoded_url, fetch_redirect_response=False) + self.assertEqual( + self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code + ) + + @modify_settings( + MIDDLEWARE={ + "append": "django.middleware.locale.LocaleMiddleware", + } + ) + def test_lang_from_translated_i18n_pattern(self): + response = self.client.post( + "/i18n/setlang/", + data={"language": "nl"}, + follow=True, + headers={"referer": "/en/translated/"}, + ) + self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "nl") + self.assertRedirects(response, "/nl/vertaald/") + # And reverse + response = self.client.post( + "/i18n/setlang/", + data={"language": "en"}, + follow=True, + headers={"referer": "/nl/vertaald/"}, + ) + self.assertRedirects(response, "/en/translated/") + + +@override_settings(ROOT_URLCONF="view_tests.urls") +class I18NViewTests(SimpleTestCase): + """Test django.views.i18n views other than set_language.""" + + @override_settings(LANGUAGE_CODE="de") + def test_get_formats(self): + formats = get_formats() + # Test 3 possible types in get_formats: integer, string, and list. 
+ self.assertEqual(formats["FIRST_DAY_OF_WEEK"], 1) + self.assertEqual(formats["DECIMAL_SEPARATOR"], ",") + self.assertEqual( + formats["TIME_INPUT_FORMATS"], ["%H:%M:%S", "%H:%M:%S.%f", "%H:%M"] + ) + + def test_jsi18n(self): + """The javascript_catalog can be deployed with language settings""" + for lang_code in ["es", "fr", "ru"]: + with override(lang_code): + catalog = gettext.translation("djangojs", locale_dir, [lang_code]) + trans_txt = catalog.gettext("this is to be translated") + response = self.client.get("/jsi18n/") + self.assertEqual( + response.headers["Content-Type"], 'text/javascript; charset="utf-8"' + ) + # response content must include a line like: + # "this is to be translated": + # json.dumps() is used to be able to check Unicode strings. + self.assertContains(response, json.dumps(trans_txt), 1) + if lang_code == "fr": + # Message with context (msgctxt) + self.assertContains(response, '"month name\\u0004May": "mai"', 1) + + @override_settings(USE_I18N=False) + def test_jsi18n_USE_I18N_False(self): + response = self.client.get("/jsi18n/") + # default plural function + self.assertContains( + response, + "django.pluralidx = function(count) { return (count == 1) ? 0 : 1; };", + ) + self.assertNotContains(response, "var newcatalog =") + + def test_jsoni18n(self): + """ + The json_catalog returns the language catalog and settings as JSON. 
+ """ + with override("de"): + response = self.client.get("/jsoni18n/") + data = json.loads(response.content.decode()) + self.assertIn("catalog", data) + self.assertIn("formats", data) + self.assertEqual( + data["formats"]["TIME_INPUT_FORMATS"], + ["%H:%M:%S", "%H:%M:%S.%f", "%H:%M"], + ) + self.assertEqual(data["formats"]["FIRST_DAY_OF_WEEK"], 1) + self.assertIn("plural", data) + self.assertEqual(data["catalog"]["month name\x04May"], "Mai") + self.assertIn("DATETIME_FORMAT", data["formats"]) + self.assertEqual(data["plural"], "(n != 1)") + + def test_jsi18n_with_missing_en_files(self): + """ + The javascript_catalog shouldn't load the fallback language in the + case that the current selected language is actually the one translated + from, and hence missing translation files completely. + + This happens easily when you're translating from English to other + languages and you've set settings.LANGUAGE_CODE to some other language + than English. + """ + with self.settings(LANGUAGE_CODE="es"), override("en-us"): + response = self.client.get("/jsi18n/") + self.assertNotContains(response, "esto tiene que ser traducido") + + def test_jsoni18n_with_missing_en_files(self): + """ + Same as above for the json_catalog view. Here we also check for the + expected JSON format. + """ + with self.settings(LANGUAGE_CODE="es"), override("en-us"): + response = self.client.get("/jsoni18n/") + data = json.loads(response.content.decode()) + self.assertIn("catalog", data) + self.assertIn("formats", data) + self.assertIn("plural", data) + self.assertEqual(data["catalog"], {}) + self.assertIn("DATETIME_FORMAT", data["formats"]) + self.assertIsNone(data["plural"]) + + def test_jsi18n_fallback_language(self): + """ + Let's make sure that the fallback language is still working properly + in cases where the selected language cannot be found. 
+ """ + with self.settings(LANGUAGE_CODE="fr"), override("fi"): + response = self.client.get("/jsi18n/") + self.assertContains(response, "il faut le traduire") + self.assertNotContains(response, "Untranslated string") + + def test_jsi18n_fallback_language_with_custom_locale_dir(self): + """ + The fallback language works when there are several levels of fallback + translation catalogs. + """ + locale_paths = [ + path.join( + path.dirname(path.dirname(path.abspath(__file__))), + "custom_locale_path", + ), + ] + with self.settings(LOCALE_PATHS=locale_paths), override("es_MX"): + response = self.client.get("/jsi18n/") + self.assertContains( + response, "custom_locale_path: esto tiene que ser traducido" + ) + response = self.client.get("/jsi18n_no_packages/") + self.assertContains( + response, "custom_locale_path: esto tiene que ser traducido" + ) + + def test_i18n_fallback_language_plural(self): + """ + The fallback to a language with less plural forms maintains the real + language's number of plural forms and correct translations. 
+ """ + with self.settings(LANGUAGE_CODE="pt"), override("ru"): + response = self.client.get("/jsi18n/") + self.assertEqual( + response.context["catalog"]["{count} plural3"], + ["{count} plural3 p3", "{count} plural3 p3s", "{count} plural3 p3t"], + ) + self.assertEqual( + response.context["catalog"]["{count} plural2"], + ["{count} plural2", "{count} plural2s", ""], + ) + with self.settings(LANGUAGE_CODE="ru"), override("pt"): + response = self.client.get("/jsi18n/") + self.assertEqual( + response.context["catalog"]["{count} plural3"], + ["{count} plural3", "{count} plural3s"], + ) + self.assertEqual( + response.context["catalog"]["{count} plural2"], + ["{count} plural2", "{count} plural2s"], + ) + + def test_i18n_english_variant(self): + with override("en-gb"): + response = self.client.get("/jsi18n/") + self.assertIn( + '"this color is to be translated": "this colour is to be translated"', + response.context["catalog_str"], + ) + + def test_i18n_language_non_english_default(self): + """ + Check if the JavaScript i18n view returns an empty language catalog + if the default language is non-English, the selected language + is English and there is not 'en' translation available. See #13388, + #3594 and #13726 for more details. + """ + with self.settings(LANGUAGE_CODE="fr"), override("en-us"): + response = self.client.get("/jsi18n/") + self.assertNotContains(response, "Choisir une heure") + + @modify_settings(INSTALLED_APPS={"append": "view_tests.app0"}) + def test_non_english_default_english_userpref(self): + """ + Same as above with the difference that there IS an 'en' translation + available. The JavaScript i18n view must return a NON empty language catalog + with the proper English translations. See #13726 for more details. 
+ """ + with self.settings(LANGUAGE_CODE="fr"), override("en-us"): + response = self.client.get("/jsi18n_english_translation/") + self.assertContains(response, "this app0 string is to be translated") + + def test_i18n_language_non_english_fallback(self): + """ + Makes sure that the fallback language is still working properly + in cases where the selected language cannot be found. + """ + with self.settings(LANGUAGE_CODE="fr"), override("none"): + response = self.client.get("/jsi18n/") + self.assertContains(response, "Choisir une heure") + + def test_escaping(self): + # Force a language via GET otherwise the gettext functions are a noop! + response = self.client.get("/jsi18n_admin/?language=de") + self.assertContains(response, "\\x04") + + @modify_settings(INSTALLED_APPS={"append": ["view_tests.app5"]}) + def test_non_BMP_char(self): + """ + Non-BMP characters should not break the javascript_catalog (#21725). + """ + with self.settings(LANGUAGE_CODE="en-us"), override("fr"): + response = self.client.get("/jsi18n/app5/") + self.assertContains(response, "emoji") + self.assertContains(response, "\\ud83d\\udca9") + + @modify_settings(INSTALLED_APPS={"append": ["view_tests.app1", "view_tests.app2"]}) + def test_i18n_language_english_default(self): + """ + Check if the JavaScript i18n view returns a complete language catalog + if the default language is en-us, the selected language has a + translation available and a catalog composed by djangojs domain + translations of multiple Python packages is requested. See #13388, + #3594 and #13514 for more details. 
+ """ + base_trans_string = ( + "il faut traduire cette cha\\u00eene de caract\\u00e8res de " + ) + app1_trans_string = base_trans_string + "app1" + app2_trans_string = base_trans_string + "app2" + with self.settings(LANGUAGE_CODE="en-us"), override("fr"): + response = self.client.get("/jsi18n_multi_packages1/") + self.assertContains(response, app1_trans_string) + self.assertContains(response, app2_trans_string) + + response = self.client.get("/jsi18n/app1/") + self.assertContains(response, app1_trans_string) + self.assertNotContains(response, app2_trans_string) + + response = self.client.get("/jsi18n/app2/") + self.assertNotContains(response, app1_trans_string) + self.assertContains(response, app2_trans_string) + + @modify_settings(INSTALLED_APPS={"append": ["view_tests.app3", "view_tests.app4"]}) + def test_i18n_different_non_english_languages(self): + """ + Similar to above but with neither default or requested language being + English. + """ + with self.settings(LANGUAGE_CODE="fr"), override("es-ar"): + response = self.client.get("/jsi18n_multi_packages2/") + self.assertContains(response, "este texto de app3 debe ser traducido") + + def test_i18n_with_locale_paths(self): + extended_locale_paths = settings.LOCALE_PATHS + [ + path.join( + path.dirname(path.dirname(path.abspath(__file__))), + "app3", + "locale", + ), + ] + with self.settings(LANGUAGE_CODE="es-ar", LOCALE_PATHS=extended_locale_paths): + with override("es-ar"): + response = self.client.get("/jsi18n/") + self.assertContains(response, "este texto de app3 debe ser traducido") + + def test_i18n_unknown_package_error(self): + view = JavaScriptCatalog.as_view() + request = RequestFactory().get("/") + msg = "Invalid package(s) provided to JavaScriptCatalog: unknown_package" + with self.assertRaisesMessage(ValueError, msg): + view(request, packages="unknown_package") + msg += ",unknown_package2" + with self.assertRaisesMessage(ValueError, msg): + view(request, packages="unknown_package+unknown_package2") + 
+ def test_template_encoding(self): + """ + The template is loaded directly, not via a template loader, and should + be opened as utf-8 charset as is the default specified on template + engines. + """ + from django.views.i18n import Path + + view = JavaScriptCatalog.as_view() + request = RequestFactory().get("/") + with mock.patch.object(Path, "open") as m: + view(request) + m.assert_called_once_with(encoding="utf-8") + + +@override_settings(ROOT_URLCONF="view_tests.urls") +class I18nSeleniumTests(SeleniumTestCase): + # The test cases use fixtures & translations from these apps. + available_apps = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "view_tests", + ] + + @override_settings(LANGUAGE_CODE="de") + def test_javascript_gettext(self): + from selenium.webdriver.common.by import By + + self.selenium.get(self.live_server_url + "/jsi18n_template/") + elem = self.selenium.find_element(By.ID, "gettext") + self.assertEqual(elem.text, "Entfernen") + elem = self.selenium.find_element(By.ID, "ngettext_sing") + self.assertEqual(elem.text, "1 Element") + elem = self.selenium.find_element(By.ID, "ngettext_plur") + self.assertEqual(elem.text, "455 Elemente") + elem = self.selenium.find_element(By.ID, "ngettext_onnonplural") + self.assertEqual(elem.text, "Bild") + elem = self.selenium.find_element(By.ID, "pgettext") + self.assertEqual(elem.text, "Kann") + elem = self.selenium.find_element(By.ID, "npgettext_sing") + self.assertEqual(elem.text, "1 Resultat") + elem = self.selenium.find_element(By.ID, "npgettext_plur") + self.assertEqual(elem.text, "455 Resultate") + elem = self.selenium.find_element(By.ID, "formats") + self.assertEqual( + elem.text, + "DATE_INPUT_FORMATS is an object; DECIMAL_SEPARATOR is a string; " + "FIRST_DAY_OF_WEEK is a number;", + ) + + @modify_settings(INSTALLED_APPS={"append": ["view_tests.app1", "view_tests.app2"]}) + @override_settings(LANGUAGE_CODE="fr") + def test_multiple_catalogs(self): + from 
selenium.webdriver.common.by import By

        self.selenium.get(self.live_server_url + "/jsi18n_multi_catalogs/")
        # Strings from the two app catalogs must both be translated on the
        # same page (catalogs are merged client-side).
        elem = self.selenium.find_element(By.ID, "app1string")
        self.assertEqual(
            elem.text, "il faut traduire cette chaîne de caractères de app1"
        )
        elem = self.selenium.find_element(By.ID, "app2string")
        self.assertEqual(
            elem.text, "il faut traduire cette chaîne de caractères de app2"
        )
diff --git a/testbed/django__django/tests/view_tests/tests/test_json.py b/testbed/django__django/tests/view_tests/tests/test_json.py
new file mode 100644
index 0000000000000000000000000000000000000000..145e6e05a4f046b3df02a996a625465373bdcf62
--- /dev/null
+++ b/testbed/django__django/tests/view_tests/tests/test_json.py
@@ -0,0 +1,20 @@
import json

from django.test import SimpleTestCase, override_settings


@override_settings(ROOT_URLCONF="view_tests.generic_urls")
class JsonResponseTests(SimpleTestCase):
    def test_json_response(self):
        response = self.client.get("/json/response/")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.headers["content-type"], "application/json")
        # datetime and Decimal values emitted by the view arrive as
        # JSON strings after serialization.
        self.assertEqual(
            json.loads(response.content.decode()),
            {
                "a": [1, 2, 3],
                "foo": {"bar": "baz"},
                "timestamp": "2013-05-19T20:00:00",
                "value": "3.14",
            },
        )
diff --git a/testbed/django__django/tests/view_tests/tests/test_specials.py b/testbed/django__django/tests/view_tests/tests/test_specials.py
new file mode 100644
index 0000000000000000000000000000000000000000..d1bedf45b92a37e3d150f70f7482038a587f333c
--- /dev/null
+++ b/testbed/django__django/tests/view_tests/tests/test_specials.py
@@ -0,0 +1,24 @@
from django.test import SimpleTestCase, override_settings


@override_settings(ROOT_URLCONF="view_tests.generic_urls")
class URLHandling(SimpleTestCase):
    """
    Tests for URL handling in views and responses.
+ """ + + redirect_target = "/%E4%B8%AD%E6%96%87/target/" + + def test_nonascii_redirect(self): + """ + A non-ASCII argument to HttpRedirect is handled properly. + """ + response = self.client.get("/nonascii_redirect/") + self.assertRedirects(response, self.redirect_target) + + def test_permanent_nonascii_redirect(self): + """ + A non-ASCII argument to HttpPermanentRedirect is handled properly. + """ + response = self.client.get("/permanent_nonascii_redirect/") + self.assertRedirects(response, self.redirect_target, status_code=301) diff --git a/testbed/django__django/tests/view_tests/tests/test_static.py b/testbed/django__django/tests/view_tests/tests/test_static.py new file mode 100644 index 0000000000000000000000000000000000000000..f60ca88cd248263f684106a92f180ee71f6df366 --- /dev/null +++ b/testbed/django__django/tests/view_tests/tests/test_static.py @@ -0,0 +1,214 @@ +import mimetypes +import unittest +from os import path +from unittest import mock +from urllib.parse import quote + +from django.conf.urls.static import static +from django.core.exceptions import ImproperlyConfigured +from django.http import FileResponse, HttpResponseNotModified +from django.test import SimpleTestCase, override_settings +from django.utils.http import http_date +from django.views.static import directory_index, was_modified_since + +from .. 
import urls
from ..urls import media_dir


@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
class StaticTests(SimpleTestCase):
    """Tests django views in django/views/static.py"""

    # URL prefix under which the static view is mounted in view_tests.urls.
    prefix = "site_media"

    def test_serve(self):
        "The static view can serve static media"
        media_files = ["file.txt", "file.txt.gz", "%2F.txt"]
        for filename in media_files:
            response = self.client.get("/%s/%s" % (self.prefix, quote(filename)))
            response_content = b"".join(response)
            file_path = path.join(media_dir, filename)
            with open(file_path, "rb") as fp:
                self.assertEqual(fp.read(), response_content)
            self.assertEqual(
                len(response_content), int(response.headers["Content-Length"])
            )
            # Content-Encoding must match what the filename implies
            # (e.g. gzip for .gz), or be absent entirely.
            self.assertEqual(
                mimetypes.guess_type(file_path)[1],
                response.get("Content-Encoding", None),
            )

    def test_chunked(self):
        "The static view should stream files in chunks to avoid large memory usage"
        response = self.client.get("/%s/%s" % (self.prefix, "long-line.txt"))
        response_iterator = iter(response)
        first_chunk = next(response_iterator)
        # A streamed FileResponse yields block_size-sized chunks, so the
        # first chunk is exactly one block.
        self.assertEqual(len(first_chunk), FileResponse.block_size)
        second_chunk = next(response_iterator)
        response.close()
        # strip() to prevent OS line endings from causing differences
        self.assertEqual(len(second_chunk.strip()), 1449)

    def test_unknown_mime_type(self):
        # Files whose type cannot be guessed fall back to the generic
        # application/octet-stream content type.
        response = self.client.get("/%s/file.unknown" % self.prefix)
        self.assertEqual("application/octet-stream", response.headers["Content-Type"])
        response.close()

    def test_copes_with_empty_path_component(self):
        file_name = "file.txt"
        # The double slash produces an empty path component, which the
        # view must tolerate.
        response = self.client.get("/%s//%s" % (self.prefix, file_name))
        response_content = b"".join(response)
        with open(path.join(media_dir, file_name), "rb") as fp:
            self.assertEqual(fp.read(), response_content)

    def test_is_modified_since(self):
        file_name = "file.txt"
        # An If-Modified-Since date in the distant past must yield the
        # full file contents, not a 304.
        response = self.client.get(
            "/%s/%s" % (self.prefix, file_name),
            headers={"if-modified-since": "Thu, 1 Jan 1970 00:00:00 GMT"},
        )
+ response_content = b"".join(response) + with open(path.join(media_dir, file_name), "rb") as fp: + self.assertEqual(fp.read(), response_content) + + def test_not_modified_since(self): + file_name = "file.txt" + response = self.client.get( + "/%s/%s" % (self.prefix, file_name), + headers={ + # This is 24h before max Unix time. Remember to fix Django and + # update this test well before 2038 :) + "if-modified-since": "Mon, 18 Jan 2038 05:14:07 GMT" + }, + ) + self.assertIsInstance(response, HttpResponseNotModified) + + def test_invalid_if_modified_since(self): + """Handle bogus If-Modified-Since values gracefully + + Assume that a file is modified since an invalid timestamp as per RFC + 9110 Section 13.1.3. + """ + file_name = "file.txt" + invalid_date = "Mon, 28 May 999999999999 28:25:26 GMT" + response = self.client.get( + "/%s/%s" % (self.prefix, file_name), + headers={"if-modified-since": invalid_date}, + ) + response_content = b"".join(response) + with open(path.join(media_dir, file_name), "rb") as fp: + self.assertEqual(fp.read(), response_content) + self.assertEqual(len(response_content), int(response.headers["Content-Length"])) + + def test_invalid_if_modified_since2(self): + """Handle even more bogus If-Modified-Since values gracefully + + Assume that a file is modified since an invalid timestamp as per RFC + 9110 Section 13.1.3. 
+ """ + file_name = "file.txt" + invalid_date = ": 1291108438, Wed, 20 Oct 2010 14:05:00 GMT" + response = self.client.get( + "/%s/%s" % (self.prefix, file_name), + headers={"if-modified-since": invalid_date}, + ) + response_content = b"".join(response) + with open(path.join(media_dir, file_name), "rb") as fp: + self.assertEqual(fp.read(), response_content) + self.assertEqual(len(response_content), int(response.headers["Content-Length"])) + + def test_404(self): + response = self.client.get("/%s/nonexistent_resource" % self.prefix) + self.assertEqual(404, response.status_code) + + def test_index(self): + response = self.client.get("/%s/" % self.prefix) + self.assertContains(response, "Index of ./") + # Directories have a trailing slash. + self.assertIn("subdir/", response.context["file_list"]) + + def test_index_subdir(self): + response = self.client.get("/%s/subdir/" % self.prefix) + self.assertContains(response, "Index of subdir/") + # File with a leading dot (e.g. .hidden) aren't displayed. + self.assertEqual(response.context["file_list"], ["visible"]) + + @override_settings( + TEMPLATES=[ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "OPTIONS": { + "loaders": [ + ( + "django.template.loaders.locmem.Loader", + { + "static/directory_index.html": "Test index", + }, + ), + ], + }, + } + ] + ) + def test_index_custom_template(self): + response = self.client.get("/%s/" % self.prefix) + self.assertEqual(response.content, b"Test index") + + def test_template_encoding(self): + """ + The template is loaded directly, not via a template loader, and should + be opened as utf-8 charset as is the default specified on template + engines. 
+ """ + from django.views.static import Path + + with mock.patch.object(Path, "open") as m: + directory_index(mock.MagicMock(), mock.MagicMock()) + m.assert_called_once_with(encoding="utf-8") + + +class StaticHelperTest(StaticTests): + """ + Test case to make sure the static URL pattern helper works as expected + """ + + def setUp(self): + super().setUp() + self._old_views_urlpatterns = urls.urlpatterns[:] + urls.urlpatterns += static("media/", document_root=media_dir) + + def tearDown(self): + super().tearDown() + urls.urlpatterns = self._old_views_urlpatterns + + def test_prefix(self): + self.assertEqual(static("test")[0].pattern.regex.pattern, "^test(?P.*)$") + + @override_settings(DEBUG=False) + def test_debug_off(self): + """No URLs are served if DEBUG=False.""" + self.assertEqual(static("test"), []) + + def test_empty_prefix(self): + with self.assertRaisesMessage( + ImproperlyConfigured, "Empty static prefix not permitted" + ): + static("") + + def test_special_prefix(self): + """No URLs are served if prefix contains a netloc part.""" + self.assertEqual(static("http://example.org"), []) + self.assertEqual(static("//example.org"), []) + + +class StaticUtilsTests(unittest.TestCase): + def test_was_modified_since_fp(self): + """ + A floating point mtime does not disturb was_modified_since (#18675). 
+ """ + mtime = 1343416141.107817 + header = http_date(mtime) + self.assertFalse(was_modified_since(header, mtime)) + + def test_was_modified_since_empty_string(self): + self.assertTrue(was_modified_since(header="", mtime=1)) diff --git a/testbed/django__django/tests/view_tests/urls.py b/testbed/django__django/tests/view_tests/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..c9a5c28573de0453aefd2496117d4af9147c6362 --- /dev/null +++ b/testbed/django__django/tests/view_tests/urls.py @@ -0,0 +1,90 @@ +import os +from functools import partial + +from django.conf.urls.i18n import i18n_patterns +from django.urls import include, path, re_path +from django.utils.translation import gettext_lazy as _ +from django.views import defaults, i18n, static + +from . import views + +base_dir = os.path.dirname(os.path.abspath(__file__)) +media_dir = os.path.join(base_dir, "media") +locale_dir = os.path.join(base_dir, "locale") + +urlpatterns = [ + path("", views.index_page), + # Default views + path("nonexistent_url/", partial(defaults.page_not_found, exception=None)), + path("server_error/", defaults.server_error), + # a view that raises an exception for the debug view + path("raises/", views.raises), + path("raises400/", views.raises400), + path("raises400_bad_request/", views.raises400_bad_request), + path("raises403/", views.raises403), + path("raises404/", views.raises404), + path("raises500/", views.raises500), + path("custom_reporter_class_view/", views.custom_reporter_class_view), + path("technical404/", views.technical404, name="my404"), + path("classbased404/", views.Http404View.as_view()), + path("classbased500/", views.Raises500View.as_view()), + # i18n views + path("i18n/", include("django.conf.urls.i18n")), + path("jsi18n/", i18n.JavaScriptCatalog.as_view(packages=["view_tests"])), + path("jsi18n_no_packages/", i18n.JavaScriptCatalog.as_view()), + path("jsi18n/app1/", i18n.JavaScriptCatalog.as_view(packages=["view_tests.app1"])), + 
path("jsi18n/app2/", i18n.JavaScriptCatalog.as_view(packages=["view_tests.app2"])), + path("jsi18n/app5/", i18n.JavaScriptCatalog.as_view(packages=["view_tests.app5"])), + path( + "jsi18n_english_translation/", + i18n.JavaScriptCatalog.as_view(packages=["view_tests.app0"]), + ), + path( + "jsi18n_multi_packages1/", + i18n.JavaScriptCatalog.as_view(packages=["view_tests.app1", "view_tests.app2"]), + ), + path( + "jsi18n_multi_packages2/", + i18n.JavaScriptCatalog.as_view(packages=["view_tests.app3", "view_tests.app4"]), + ), + path( + "jsi18n_admin/", + i18n.JavaScriptCatalog.as_view(packages=["django.contrib.admin", "view_tests"]), + ), + path("jsi18n_template/", views.jsi18n), + path("jsi18n_multi_catalogs/", views.jsi18n_multi_catalogs), + path("jsoni18n/", i18n.JSONCatalog.as_view(packages=["view_tests"])), + # Static views + re_path( + r"^site_media/(?P.*)$", + static.serve, + {"document_root": media_dir, "show_indexes": True}, + ), +] + +urlpatterns += i18n_patterns( + re_path(_(r"^translated/$"), views.index_page, name="i18n_prefixed"), +) + +urlpatterns += [ + path( + "safestring_exception/", + views.safestring_in_template_exception, + name="safestring_exception", + ), + path("template_exception/", views.template_exception, name="template_exception"), + path( + "raises_template_does_not_exist/", + views.raises_template_does_not_exist, + name="raises_template_does_not_exist", + ), + path("render_no_template/", views.render_no_template, name="render_no_template"), + re_path( + r"^test-setlang/(?P[^/]+)/$", + views.with_parameter, + name="with_parameter", + ), + # Patterns to test the technical 404. 
+ re_path(r"^regex-post/(?P[0-9]+)/$", views.index_page, name="regex-post"), + path("path-post//", views.index_page, name="path-post"), +] diff --git a/testbed/django__django/tests/view_tests/views.py b/testbed/django__django/tests/view_tests/views.py new file mode 100644 index 0000000000000000000000000000000000000000..9eb7a352d63d66a50bdec19c7fb9baf5c3fbfb9e --- /dev/null +++ b/testbed/django__django/tests/view_tests/views.py @@ -0,0 +1,424 @@ +import datetime +import decimal +import logging +import sys +from pathlib import Path + +from django.core.exceptions import BadRequest, PermissionDenied, SuspiciousOperation +from django.http import Http404, HttpResponse, JsonResponse +from django.shortcuts import render +from django.template import Context, Template, TemplateDoesNotExist +from django.urls import get_resolver +from django.views import View +from django.views.debug import ( + ExceptionReporter, + SafeExceptionReporterFilter, + technical_500_response, +) +from django.views.decorators.debug import sensitive_post_parameters, sensitive_variables + +TEMPLATES_PATH = Path(__file__).resolve().parent / "templates" + + +def index_page(request): + """Dummy index page""" + return HttpResponse("Dummy page") + + +def with_parameter(request, parameter): + return HttpResponse("ok") + + +def raises(request): + # Make sure that a callable that raises an exception in the stack frame's + # local vars won't hijack the technical 500 response (#15025). + def callable(): + raise Exception + + try: + raise Exception + except Exception: + return technical_500_response(request, *sys.exc_info()) + + +def raises500(request): + # We need to inspect the HTML generated by the fancy 500 debug view but + # the test client ignores it, so we send it explicitly. 
    try:
        raise Exception
    except Exception:
        return technical_500_response(request, *sys.exc_info())


class Raises500View(View):
    # Class-based counterpart of raises500(): render the technical 500
    # debug page explicitly so its HTML can be inspected by tests.
    def get(self, request):
        try:
            raise Exception
        except Exception:
            return technical_500_response(request, *sys.exc_info())


def raises400(request):
    # SuspiciousOperation is converted into an HTTP 400 by Django.
    raise SuspiciousOperation


def raises400_bad_request(request):
    raise BadRequest("Malformed request syntax")


def raises403(request):
    raise PermissionDenied("Insufficient Permissions")


def raises404(request):
    # Resolving a path that is not in the URLconf raises a 404 from the
    # resolver itself.
    resolver = get_resolver(None)
    resolver.resolve("/not-in-urls")


def technical404(request):
    raise Http404("Testing technical 404.")


class Http404View(View):
    def get(self, request):
        raise Http404("Testing class-based technical 404.")


def template_exception(request):
    # Renders a template that raises during rendering (used by the debug
    # view tests).
    return render(request, "debug/template_exception.html")


def safestring_in_template_exception(request):
    """
    Trigger an exception in the template machinery which causes a SafeString
    to be inserted as args[0] of the Exception.
    """
    template = Template('{% extends "" %}')
    try:
        template.render(Context())
    except Exception:
        return technical_500_response(request, *sys.exc_info())


def jsi18n(request):
    return render(request, "jsi18n.html")


def jsi18n_multi_catalogs(request):
    return render(request, "jsi18n-multi-catalogs.html")


def raises_template_does_not_exist(request, path="i_dont_exist.html"):
    # We need to inspect the HTML generated by the fancy 500 debug view but
    # the test client ignores it, so we send it explicitly.
    try:
        return render(request, path)
    except TemplateDoesNotExist:
        return technical_500_response(request, *sys.exc_info())


def render_no_template(request):
    # If we do not specify a template, we need to make sure the debug
    # view doesn't blow up.
+ return render(request, [], {}) + + +def send_log(request, exc_info): + logger = logging.getLogger("django") + # The default logging config has a logging filter to ensure admin emails are + # only sent with DEBUG=False, but since someone might choose to remove that + # filter, we still want to be able to test the behavior of error emails + # with DEBUG=True. So we need to remove the filter temporarily. + admin_email_handler = [ + h for h in logger.handlers if h.__class__.__name__ == "AdminEmailHandler" + ][0] + orig_filters = admin_email_handler.filters + admin_email_handler.filters = [] + admin_email_handler.include_html = True + logger.error( + "Internal Server Error: %s", + request.path, + exc_info=exc_info, + extra={"status_code": 500, "request": request}, + ) + admin_email_handler.filters = orig_filters + + +def non_sensitive_view(request): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's source + # is displayed in the exception report. + cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +@sensitive_variables("sauce") +@sensitive_post_parameters("bacon-key", "sausage-key") +def sensitive_view(request): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's source + # is displayed in the exception report. 
+ cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +@sensitive_variables("sauce") +@sensitive_post_parameters("bacon-key", "sausage-key") +async def async_sensitive_view(request): + # Do not just use plain strings for the variables' values in the code so + # that the tests don't return false positives when the function's source is + # displayed in the exception report. + cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +@sensitive_variables("sauce") +@sensitive_post_parameters("bacon-key", "sausage-key") +async def async_sensitive_function(request): + # Do not just use plain strings for the variables' values in the code so + # that the tests don't return false positives when the function's source is + # displayed in the exception report. 
+ cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + raise Exception + + +async def async_sensitive_view_nested(request): + try: + await async_sensitive_function(request) + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +@sensitive_variables() +@sensitive_post_parameters() +def paranoid_view(request): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's source + # is displayed in the exception report. + cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +def sensitive_args_function_caller(request): + try: + sensitive_args_function( + "".join( + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + ) + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +@sensitive_variables("sauce") +def sensitive_args_function(sauce): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's source + # is displayed in the exception report. 
+ cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + raise Exception + + +def sensitive_kwargs_function_caller(request): + try: + sensitive_kwargs_function( + "".join( + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + ) + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +@sensitive_variables("sauce") +def sensitive_kwargs_function(sauce=None): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's source + # is displayed in the exception report. + cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + raise Exception + + +class UnsafeExceptionReporterFilter(SafeExceptionReporterFilter): + """ + Ignores all the filtering done by its parent class. + """ + + def get_post_parameters(self, request): + return request.POST + + def get_traceback_frame_variables(self, request, tb_frame): + return tb_frame.f_locals.items() + + +@sensitive_variables() +@sensitive_post_parameters() +def custom_exception_reporter_filter_view(request): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's source + # is displayed in the exception report. 
+ cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + request.exception_reporter_filter = UnsafeExceptionReporterFilter() + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +class CustomExceptionReporter(ExceptionReporter): + custom_traceback_text = "custom traceback text" + + def get_traceback_html(self): + return self.custom_traceback_text + + +class TemplateOverrideExceptionReporter(ExceptionReporter): + html_template_path = TEMPLATES_PATH / "my_technical_500.html" + text_template_path = TEMPLATES_PATH / "my_technical_500.txt" + + +def custom_reporter_class_view(request): + request.exception_reporter_class = CustomExceptionReporter + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + return technical_500_response(request, *exc_info) + + +class Klass: + @sensitive_variables("sauce") + def method(self, request): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's + # source is displayed in the exception report. + cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + @sensitive_variables("sauce") + async def async_method(self, request): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's + # source is displayed in the exception report. 
+ cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + try: + raise Exception + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + @sensitive_variables("sauce") + async def _async_method_inner(self, request): + # Do not just use plain strings for the variables' values in the code + # so that the tests don't return false positives when the function's + # source is displayed in the exception report. + cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + raise Exception + + async def async_method_nested(self, request): + try: + await self._async_method_inner(request) + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +def sensitive_method_view(request): + return Klass().method(request) + + +async def async_sensitive_method_view(request): + return await Klass().async_method(request) + + +async def async_sensitive_method_view_nested(request): + return await Klass().async_method_nested(request) + + +@sensitive_variables("sauce") +@sensitive_post_parameters("bacon-key", "sausage-key") +def multivalue_dict_key_error(request): + cooked_eggs = "".join(["s", "c", "r", "a", "m", "b", "l", "e", "d"]) # NOQA + sauce = "".join( # NOQA + ["w", "o", "r", "c", "e", "s", "t", "e", "r", "s", "h", "i", "r", "e"] + ) + try: + request.POST["bar"] + except Exception: + exc_info = sys.exc_info() + send_log(request, exc_info) + return technical_500_response(request, *exc_info) + + +def json_response_view(request): + return JsonResponse( + { + "a": [1, 2, 3], + "foo": {"bar": "baz"}, + # Make sure datetime and Decimal objects would be serialized properly + "timestamp": 
datetime.datetime(2013, 5, 19, 20), + "value": decimal.Decimal("3.14"), + } + ) diff --git a/testbed/django__django/tests/wsgi/__init__.py b/testbed/django__django/tests/wsgi/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/wsgi/tests.py b/testbed/django__django/tests/wsgi/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..b1b5a7d00ef109fe817692b2cde52d2bd9bad934 --- /dev/null +++ b/testbed/django__django/tests/wsgi/tests.py @@ -0,0 +1,131 @@ +from django.core.exceptions import ImproperlyConfigured +from django.core.servers.basehttp import get_internal_wsgi_application +from django.core.signals import request_started +from django.core.wsgi import get_wsgi_application +from django.db import close_old_connections +from django.http import FileResponse +from django.test import SimpleTestCase, override_settings +from django.test.client import RequestFactory + + +@override_settings(ROOT_URLCONF="wsgi.urls") +class WSGITest(SimpleTestCase): + request_factory = RequestFactory() + + def setUp(self): + request_started.disconnect(close_old_connections) + + def tearDown(self): + request_started.connect(close_old_connections) + + def test_get_wsgi_application(self): + """ + get_wsgi_application() returns a functioning WSGI callable. 
+ """ + application = get_wsgi_application() + + environ = self.request_factory._base_environ( + PATH_INFO="/", CONTENT_TYPE="text/html; charset=utf-8", REQUEST_METHOD="GET" + ) + + response_data = {} + + def start_response(status, headers): + response_data["status"] = status + response_data["headers"] = headers + + response = application(environ, start_response) + + self.assertEqual(response_data["status"], "200 OK") + self.assertEqual( + set(response_data["headers"]), + {("Content-Length", "12"), ("Content-Type", "text/html; charset=utf-8")}, + ) + self.assertIn( + bytes(response), + [ + b"Content-Length: 12\r\nContent-Type: text/html; " + b"charset=utf-8\r\n\r\nHello World!", + b"Content-Type: text/html; " + b"charset=utf-8\r\nContent-Length: 12\r\n\r\nHello World!", + ], + ) + + def test_file_wrapper(self): + """ + FileResponse uses wsgi.file_wrapper. + """ + + class FileWrapper: + def __init__(self, filelike, block_size=None): + self.block_size = block_size + filelike.close() + + application = get_wsgi_application() + environ = self.request_factory._base_environ( + PATH_INFO="/file/", + REQUEST_METHOD="GET", + **{"wsgi.file_wrapper": FileWrapper}, + ) + response_data = {} + + def start_response(status, headers): + response_data["status"] = status + response_data["headers"] = headers + + response = application(environ, start_response) + self.assertEqual(response_data["status"], "200 OK") + self.assertIsInstance(response, FileWrapper) + self.assertEqual(response.block_size, FileResponse.block_size) + + +class GetInternalWSGIApplicationTest(SimpleTestCase): + @override_settings(WSGI_APPLICATION="wsgi.wsgi.application") + def test_success(self): + """ + If ``WSGI_APPLICATION`` is a dotted path, the referenced object is + returned. 
+ """ + app = get_internal_wsgi_application() + + from .wsgi import application + + self.assertIs(app, application) + + @override_settings(WSGI_APPLICATION=None) + def test_default(self): + """ + If ``WSGI_APPLICATION`` is ``None``, the return value of + ``get_wsgi_application`` is returned. + """ + # Mock out get_wsgi_application so we know its return value is used + fake_app = object() + + def mock_get_wsgi_app(): + return fake_app + + from django.core.servers import basehttp + + _orig_get_wsgi_app = basehttp.get_wsgi_application + basehttp.get_wsgi_application = mock_get_wsgi_app + + try: + app = get_internal_wsgi_application() + + self.assertIs(app, fake_app) + finally: + basehttp.get_wsgi_application = _orig_get_wsgi_app + + @override_settings(WSGI_APPLICATION="wsgi.noexist.app") + def test_bad_module(self): + msg = "WSGI application 'wsgi.noexist.app' could not be loaded; Error importing" + with self.assertRaisesMessage(ImproperlyConfigured, msg): + get_internal_wsgi_application() + + @override_settings(WSGI_APPLICATION="wsgi.wsgi.noexist") + def test_bad_name(self): + msg = ( + "WSGI application 'wsgi.wsgi.noexist' could not be loaded; Error importing" + ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + get_internal_wsgi_application() diff --git a/testbed/django__django/tests/wsgi/urls.py b/testbed/django__django/tests/wsgi/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..b924adea41aec992dcbb93509821a4bd68507fe8 --- /dev/null +++ b/testbed/django__django/tests/wsgi/urls.py @@ -0,0 +1,12 @@ +from django.http import FileResponse, HttpResponse +from django.urls import path + + +def helloworld(request): + return HttpResponse("Hello World!") + + +urlpatterns = [ + path("", helloworld), + path("file/", lambda x: FileResponse(open(__file__, "rb"))), +] diff --git a/testbed/django__django/tests/wsgi/wsgi.py b/testbed/django__django/tests/wsgi/wsgi.py new file mode 100644 index 
0000000000000000000000000000000000000000..400457f1529f44c52a574684b64810cdc41e55ce --- /dev/null +++ b/testbed/django__django/tests/wsgi/wsgi.py @@ -0,0 +1,2 @@ +# This is just to test finding, it doesn't have to be a real WSGI callable +application = object() diff --git a/testbed/django__django/tests/xor_lookups/__init__.py b/testbed/django__django/tests/xor_lookups/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/tests/xor_lookups/models.py b/testbed/django__django/tests/xor_lookups/models.py new file mode 100644 index 0000000000000000000000000000000000000000..22e79aa94fd5a86af26c4757d7f0fd369034d639 --- /dev/null +++ b/testbed/django__django/tests/xor_lookups/models.py @@ -0,0 +1,8 @@ +from django.db import models + + +class Number(models.Model): + num = models.IntegerField() + + def __str__(self): + return str(self.num) diff --git a/testbed/django__django/tests/xor_lookups/tests.py b/testbed/django__django/tests/xor_lookups/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..d58d16cf11b8011eac09d9b6110b45ab890cad46 --- /dev/null +++ b/testbed/django__django/tests/xor_lookups/tests.py @@ -0,0 +1,88 @@ +from django.db.models import Q +from django.test import TestCase + +from .models import Number + + +class XorLookupsTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.numbers = [Number.objects.create(num=i) for i in range(10)] + + def test_filter(self): + self.assertCountEqual( + Number.objects.filter(num__lte=7) ^ Number.objects.filter(num__gte=3), + self.numbers[:3] + self.numbers[8:], + ) + self.assertCountEqual( + Number.objects.filter(Q(num__lte=7) ^ Q(num__gte=3)), + self.numbers[:3] + self.numbers[8:], + ) + + def test_filter_multiple(self): + qs = Number.objects.filter( + Q(num__gte=1) + ^ Q(num__gte=3) + ^ Q(num__gte=5) + ^ Q(num__gte=7) + ^ Q(num__gte=9) + ) + self.assertCountEqual( + qs, + self.numbers[1:3] 
+ self.numbers[5:7] + self.numbers[9:], + ) + self.assertCountEqual( + qs.values_list("num", flat=True), + [ + i + for i in range(10) + if (i >= 1) ^ (i >= 3) ^ (i >= 5) ^ (i >= 7) ^ (i >= 9) + ], + ) + + def test_filter_negated(self): + self.assertCountEqual( + Number.objects.filter(Q(num__lte=7) ^ ~Q(num__lt=3)), + self.numbers[:3] + self.numbers[8:], + ) + self.assertCountEqual( + Number.objects.filter(~Q(num__gt=7) ^ ~Q(num__lt=3)), + self.numbers[:3] + self.numbers[8:], + ) + self.assertCountEqual( + Number.objects.filter(Q(num__lte=7) ^ ~Q(num__lt=3) ^ Q(num__lte=1)), + [self.numbers[2]] + self.numbers[8:], + ) + self.assertCountEqual( + Number.objects.filter(~(Q(num__lte=7) ^ ~Q(num__lt=3) ^ Q(num__lte=1))), + self.numbers[:2] + self.numbers[3:8], + ) + + def test_exclude(self): + self.assertCountEqual( + Number.objects.exclude(Q(num__lte=7) ^ Q(num__gte=3)), + self.numbers[3:8], + ) + + def test_stages(self): + numbers = Number.objects.all() + self.assertSequenceEqual( + numbers.filter(num__gte=0) ^ numbers.filter(num__lte=11), + [], + ) + self.assertSequenceEqual( + numbers.filter(num__gt=0) ^ numbers.filter(num__lt=11), + [self.numbers[0]], + ) + + def test_pk_q(self): + self.assertCountEqual( + Number.objects.filter(Q(pk=self.numbers[0].pk) ^ Q(pk=self.numbers[1].pk)), + self.numbers[:2], + ) + + def test_empty_in(self): + self.assertCountEqual( + Number.objects.filter(Q(pk__in=[]) ^ Q(num__gte=5)), + self.numbers[5:], + ) diff --git a/testbed/django__django/tox.ini b/testbed/django__django/tox.ini new file mode 100644 index 0000000000000000000000000000000000000000..978d488c13ae7f4a75f68dcad6efba979a03cf2a --- /dev/null +++ b/testbed/django__django/tox.ini @@ -0,0 +1,89 @@ +# Tox (https://tox.readthedocs.io/) is a tool for running tests in multiple +# virtualenvs. This configuration file helps to run the test suite on all +# supported Python versions. To use it, "python -m pip install tox" and +# then run "tox" from this directory. 
+ +[tox] +minversion = 4.0 +skipsdist = true +envlist = + py3 + black + blacken-docs + flake8 + docs + isort + +# Add environment to use the default python3 installation +[testenv:py3] +basepython = python3 + +[testenv] +usedevelop = true +# OBJC_DISABLE_INITIALIZE_FORK_SAFETY fixes hung tests for MacOS users. (#30806) +passenv = DJANGO_SETTINGS_MODULE,PYTHONPATH,HOME,DISPLAY,OBJC_DISABLE_INITIALIZE_FORK_SAFETY +setenv = + PYTHONDONTWRITEBYTECODE=1 +deps = + -e . + py{3,310,311}: -rtests/requirements/py3.txt + postgres: -rtests/requirements/postgres.txt + mysql: -rtests/requirements/mysql.txt + oracle: -rtests/requirements/oracle.txt +changedir = tests +commands = + {envpython} runtests.py {posargs} + +[testenv:black] +basepython = python3 +usedevelop = false +deps = black +changedir = {toxinidir} +commands = black --check --diff . + +[testenv:blacken-docs] +basepython = python3 +usedevelop = false +allowlist_externals = + make +deps = blacken-docs +changedir = docs +commands = + make black + +[testenv:flake8] +basepython = python3 +usedevelop = false +deps = flake8 >= 3.7.0 +changedir = {toxinidir} +commands = flake8 . 
+ +[testenv:docs] +basepython = python3 +usedevelop = false +allowlist_externals = + make +deps = + Sphinx + pyenchant + sphinxcontrib-spelling +changedir = docs +commands = + make spelling + +[testenv:isort] +basepython = python3 +usedevelop = false +deps = isort >= 5.1.0 +changedir = {toxinidir} +commands = isort --check-only --diff django tests scripts + +[testenv:javascript] +usedevelop = false +deps = +changedir = {toxinidir} +allowlist_externals = + npm +commands = + npm install + npm test diff --git a/testbed/docker__docker-py/.coveragerc b/testbed/docker__docker-py/.coveragerc new file mode 100644 index 0000000000000000000000000000000000000000..0fa6f7fc2ec5746b0040487330c306e9e7863340 --- /dev/null +++ b/testbed/docker__docker-py/.coveragerc @@ -0,0 +1,10 @@ +[run] +branch = True +source = docker + +[report] +exclude_lines = + if __name__ == .__main__.: + +[html] +directory = html diff --git a/testbed/docker__docker-py/.dockerignore b/testbed/docker__docker-py/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..a1e09dfaf0f1bd00f7bac4327b25056281dce633 --- /dev/null +++ b/testbed/docker__docker-py/.dockerignore @@ -0,0 +1,17 @@ +.git/ + +build +dist +*.egg-info +*.egg/ +*.pyc +*.swp + +.tox +.coverage +html/* +tests/__pycache__ + +# Compiled Documentation +site/ +Makefile diff --git a/testbed/docker__docker-py/.editorconfig b/testbed/docker__docker-py/.editorconfig new file mode 100644 index 0000000000000000000000000000000000000000..d7f2776ada4e363cf1e8473ad249737c55e9f647 --- /dev/null +++ b/testbed/docker__docker-py/.editorconfig @@ -0,0 +1,11 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true +max_line_length = 80 + +[*.md] +trim_trailing_whitespace = false diff --git a/testbed/docker__docker-py/.gitignore b/testbed/docker__docker-py/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..9980377f9cc414ace50afea63028a6fbef74a2c2 
--- /dev/null +++ b/testbed/docker__docker-py/.gitignore @@ -0,0 +1,17 @@ +build +dist +*.egg-info +*.egg/ +*.pyc +*.swp + +.tox +.coverage +html/* + +# Compiled Documentation +site/ + +env/ +venv/ +.idea/ diff --git a/testbed/docker__docker-py/.travis.yml b/testbed/docker__docker-py/.travis.yml new file mode 100644 index 0000000000000000000000000000000000000000..abbb578295d3bb41d39d937973944ede12565143 --- /dev/null +++ b/testbed/docker__docker-py/.travis.yml @@ -0,0 +1,14 @@ +sudo: false +language: python +python: + - "2.7" +env: + - TOX_ENV=py26 + - TOX_ENV=py27 + - TOX_ENV=py33 + - TOX_ENV=py34 + - TOX_ENV=flake8 +install: + - pip install tox +script: + - tox -e $TOX_ENV diff --git a/testbed/docker__docker-py/CONTRIBUTING.md b/testbed/docker__docker-py/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..75365c8825a2da9e3b1a9d4bb48a305953a6e72a --- /dev/null +++ b/testbed/docker__docker-py/CONTRIBUTING.md @@ -0,0 +1,102 @@ +# Contributing guidelines + +Thank you for your interest in the project. We look forward to your +contribution. In order to make the process as fast and streamlined as possible, +here is a set of guidelines we recommend you follow. + +## Reporting issues + +We do our best to ensure bugs don't creep up in our releases, but some may +still slip through. 
If you encounter one while using docker-py, please create +an issue [in the tracker](https://github.com/docker/docker-py/issues/new) with +the following information: + +- docker-py version, docker version and python version +``` +pip freeze | grep docker-py && python --version && docker version +``` +- OS, distribution and OS version +- The issue you're encountering including a stacktrace if applicable +- If possible, steps or a code snippet to reproduce the issue + +To save yourself time, please be sure to check our +[documentation](http://docker-py.readthedocs.org/) and use the +[search function](https://github.com/docker/docker-py/search) to find out if +it has already been addressed, or is currently being looked at. + +## Submitting pull requests + +Do you have a fix for an existing issue, or want to add a new functionality +to docker-py? We happily welcome pull requests. Here are a few tips to make +the review process easier on both the maintainers and yourself. + +### 1. Sign your commits + +Please refer to the ["Sign your work"](https://github.com/docker/docker/blob/master/CONTRIBUTING.md#sign-your-work) +paragraph in the Docker contribution guidelines. + +### 2. Make sure tests pass + +Before we can review your pull request, please ensure that nothing has been +broken by your changes by running the test suite. You can do so simply by +running `make test` in the project root. This also includes coding style using +`flake8` + +### 3. Write clear, self-contained commits + +Your commit message should be concise and describe the nature of the change. +The commit itself should make sense in isolation from the others in your PR. +Specifically, one should be able to review your commit separately from the +context. + +### 4. Rebase proactively + +It's much easier to review a pull request that is up to date against the +current master branch. + +### 5. 
Notify thread subscribers when changes are made + +GitHub doesn't notify subscribers when new commits happen on a PR, and +fixes or additions might be missed. Please add a comment to the PR thread +when you push new changes. + +### 6. Two maintainers LGTM are required for merging + +Please wait for review and approval of two maintainers, and respond to their +comments and suggestions during review. + +### 7. Add tests + +Whether you're adding new functionality to the project or fixing a bug, please +add relevant tests to ensure the code you added continues to work as the +project evolves. + +### 8. Add docs + +This usually applies to new features rather than bug fixes, but new behavior +should always be documented. + +### 9. Ask questions + +If you're ever confused about something pertaining to the project, feel free +to reach out and ask questions. We will do our best to answer and help out. + + +## Development environment + +If you're looking contribute to docker-py but are new to the project or Python, +here are the steps to get you started. + +1. Fork [https://github.com/docker/docker-py](https://github.com/docker/docker-py) + to your username. +2. Clone your forked repository locally with + `git clone git@github.com:yourusername/docker-py.git`. +3. Configure a + [remote](https://help.github.com/articles/configuring-a-remote-for-a-fork/) + for your fork so that you can + [sync changes you make](https://help.github.com/articles/syncing-a-fork/) + with the original repository. +4. Enter the local directory `cd docker-py`. +5. Run `python setup.py develop` to install the dev version of the project + and required dependencies. 
We recommend you do so inside a + [virtual environment](http://docs.python-guide.org/en/latest/dev/virtualenvs) diff --git a/testbed/docker__docker-py/Dockerfile b/testbed/docker__docker-py/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..f1b806b716a3a8851d1fc8016239bd12ddb7e6e3 --- /dev/null +++ b/testbed/docker__docker-py/Dockerfile @@ -0,0 +1,14 @@ +FROM python:2.7 +MAINTAINER Joffrey F + +RUN mkdir /home/docker-py +WORKDIR /home/docker-py + +ADD requirements.txt /home/docker-py/requirements.txt +RUN pip install -r requirements.txt + +ADD test-requirements.txt /home/docker-py/test-requirements.txt +RUN pip install -r test-requirements.txt + +ADD . /home/docker-py +RUN pip install . diff --git a/testbed/docker__docker-py/Dockerfile-py3 b/testbed/docker__docker-py/Dockerfile-py3 new file mode 100644 index 0000000000000000000000000000000000000000..a19d974a00faf7937d86092eede3afe8e94f8a9a --- /dev/null +++ b/testbed/docker__docker-py/Dockerfile-py3 @@ -0,0 +1,14 @@ +FROM python:3.4 +MAINTAINER Joffrey F + +RUN mkdir /home/docker-py +WORKDIR /home/docker-py + +ADD requirements.txt /home/docker-py/requirements.txt +RUN pip install -r requirements.txt + +ADD test-requirements.txt /home/docker-py/test-requirements.txt +RUN pip install -r test-requirements.txt + +ADD . /home/docker-py +RUN pip install . diff --git a/testbed/docker__docker-py/LICENSE b/testbed/docker__docker-py/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d645695673349e3947e8e5ae42332d0ac3164cd7 --- /dev/null +++ b/testbed/docker__docker-py/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/testbed/docker__docker-py/MAINTAINERS b/testbed/docker__docker-py/MAINTAINERS new file mode 100644 index 0000000000000000000000000000000000000000..ed93c01c70e6fb108124e330cba510c24d3b255e --- /dev/null +++ b/testbed/docker__docker-py/MAINTAINERS @@ -0,0 +1,52 @@ +# Docker-py maintainers file +# +# This file describes who runs the docker/docker-py project and how. +# This is a living document - if you see something out of date or missing, speak up! +# +# It is structured to be consumable by both humans and programs. +# To extract its contents programmatically, use any TOML-compliant parser. +# +# This file is compiled into the MAINTAINERS file in docker/opensource. 
+# +[Org] + [Org."Core maintainers"] + people = [ + "aanand", + "dnephin", + "mnowster", + "mpetazzoni", + "shin-", + ] + +[people] + +# A reference list of all people associated with the project. +# All other sections should refer to people by their canonical key +# in the people section. + + # ADD YOURSELF HERE IN ALPHABETICAL ORDER + + [people.aanand] + Name = "Aanand Prasad" + Email = "aanand@docker.com" + GitHub = "aanand" + + [people.dnephin] + Name = "Daniel Nephin" + Email = "dnephin@gmail.com" + GitHub = "dnephin" + + [people.mnowster] + Name = "Mazz Mosley" + Email = "mazz@houseofmnowster.com" + GitHub = "mnowster" + + [people.mpetazzoni] + Name = "Maxime Petazzoni" + Email = "maxime.petazzoni@bulix.org" + GitHub = "mpetazzoni" + + [people.shin-] + Name = "Joffrey F" + Email = "joffrey@docker.com" + GitHub = "shin-" diff --git a/testbed/docker__docker-py/MANIFEST.in b/testbed/docker__docker-py/MANIFEST.in new file mode 100644 index 0000000000000000000000000000000000000000..5513a10fdf24e55f8275213f7d6bb6012472681d --- /dev/null +++ b/testbed/docker__docker-py/MANIFEST.in @@ -0,0 +1,6 @@ +include test-requirements.txt +include requirements.txt +include README.md +include LICENSE +recursive-include tests *.py +recursive-include tests/unit/testdata * diff --git a/testbed/docker__docker-py/Makefile b/testbed/docker__docker-py/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..22abf50a77bc4adfdd7e74212c7f60353a174982 --- /dev/null +++ b/testbed/docker__docker-py/Makefile @@ -0,0 +1,48 @@ +.PHONY: all build test integration-test unit-test build-py3 unit-test-py3 integration-test-py3 + +all: test + +clean: + rm -rf tests/__pycache__ + rm -rf tests/*/__pycache__ + docker rm -vf dpy-dind + +build: + docker build -t docker-py . + +build-py3: + docker build -t docker-py3 -f Dockerfile-py3 . + +build-dind-certs: + docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs . 
+ +test: flake8 unit-test unit-test-py3 integration-dind integration-dind-ssl + +unit-test: build + docker run docker-py py.test tests/unit + +unit-test-py3: build-py3 + docker run docker-py3 py.test tests/unit + +integration-test: build + docker run -v /var/run/docker.sock:/var/run/docker.sock docker-py py.test tests/integration + +integration-test-py3: build-py3 + docker run -v /var/run/docker.sock:/var/run/docker.sock docker-py3 py.test tests/integration + +integration-dind: build build-py3 + docker rm -vf dpy-dind || : + docker run -d --name dpy-dind --env="DOCKER_HOST=tcp://localhost:2375" --privileged dockerswarm/dind:1.9.0 docker -d -H tcp://0.0.0.0:2375 + docker run --env="DOCKER_HOST=tcp://docker:2375" --link=dpy-dind:docker docker-py py.test tests/integration + docker run --env="DOCKER_HOST=tcp://docker:2375" --link=dpy-dind:docker docker-py3 py.test tests/integration + docker rm -vf dpy-dind + +integration-dind-ssl: build-dind-certs build build-py3 + docker run -d --name dpy-dind-certs dpy-dind-certs + docker run -d --env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --volumes-from dpy-dind-certs --name dpy-dind-ssl -v /tmp --privileged dockerswarm/dind:1.9.0 docker daemon --tlsverify --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem --tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 + docker run --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --link=dpy-dind-ssl:docker docker-py py.test tests/integration + docker run --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --link=dpy-dind-ssl:docker docker-py3 py.test tests/integration + docker rm -vf dpy-dind-ssl dpy-dind-certs + +flake8: build + docker run docker-py flake8 docker tests diff --git a/testbed/docker__docker-py/README.md b/testbed/docker__docker-py/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..e0daf567c3c655cedcf5696ca05bd0c313aeff9d --- /dev/null +++ b/testbed/docker__docker-py/README.md @@ -0,0 +1,26 @@ +docker-py +========= + +[![Build Status](https://travis-ci.org/docker/docker-py.png)](https://travis-ci.org/docker/docker-py) + +A Python library for the Docker Remote API. It does everything the `docker` command does, but from within Python – run containers, manage them, pull/push images, etc. + +Installation +------------ + +The latest stable version is always available on PyPi. + + pip install docker-py + +Documentation +------------ + +[![Documentation Status](https://readthedocs.org/projects/docker-py/badge/?version=latest)](https://readthedocs.org/projects/docker-py/?badge=latest) + +[Read the full documentation here.](http://docker-py.readthedocs.org/en/latest/). +The source is available in the `docs/` directory. + + +License +------- +Docker is licensed under the Apache License, Version 2.0. See LICENSE for full license text diff --git a/testbed/docker__docker-py/docker/__init__.py b/testbed/docker__docker-py/docker/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3844c81ac844d38a3a9b3c3c6ebef0c054650a4c --- /dev/null +++ b/testbed/docker__docker-py/docker/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2013 dotCloud inc. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from .version import version, version_info + +__version__ = version +__title__ = 'docker-py' + +from .client import Client, AutoVersionClient # flake8: noqa diff --git a/testbed/docker__docker-py/docker/api/__init__.py b/testbed/docker__docker-py/docker/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9e7442890c708837dec60659f7bd6451c75a4212 --- /dev/null +++ b/testbed/docker__docker-py/docker/api/__init__.py @@ -0,0 +1,8 @@ +# flake8: noqa +from .build import BuildApiMixin +from .container import ContainerApiMixin +from .daemon import DaemonApiMixin +from .exec_api import ExecApiMixin +from .image import ImageApiMixin +from .volume import VolumeApiMixin +from .network import NetworkApiMixin diff --git a/testbed/docker__docker-py/docker/api/build.py b/testbed/docker__docker-py/docker/api/build.py new file mode 100644 index 0000000000000000000000000000000000000000..6bfaba102e4fade9c25e8908cebefeaa5d77709c --- /dev/null +++ b/testbed/docker__docker-py/docker/api/build.py @@ -0,0 +1,141 @@ +import logging +import os +import re +import json + +from .. import constants +from .. import errors +from .. import auth +from .. 
import utils + + +log = logging.getLogger(__name__) + + +class BuildApiMixin(object): + def build(self, path=None, tag=None, quiet=False, fileobj=None, + nocache=False, rm=False, stream=False, timeout=None, + custom_context=False, encoding=None, pull=False, + forcerm=False, dockerfile=None, container_limits=None, + decode=False, buildargs=None): + remote = context = headers = None + container_limits = container_limits or {} + if path is None and fileobj is None: + raise TypeError("Either path or fileobj needs to be provided.") + + for key in container_limits.keys(): + if key not in constants.CONTAINER_LIMITS_KEYS: + raise errors.DockerException( + 'Invalid container_limits key {0}'.format(key) + ) + + if custom_context: + if not fileobj: + raise TypeError("You must specify fileobj with custom_context") + context = fileobj + elif fileobj is not None: + context = utils.mkbuildcontext(fileobj) + elif path.startswith(('http://', 'https://', + 'git://', 'github.com/', 'git@')): + remote = path + elif not os.path.isdir(path): + raise TypeError("You must specify a directory to build in path") + else: + dockerignore = os.path.join(path, '.dockerignore') + exclude = None + if os.path.exists(dockerignore): + with open(dockerignore, 'r') as f: + exclude = list(filter(bool, f.read().splitlines())) + context = utils.tar(path, exclude=exclude, dockerfile=dockerfile) + + if utils.compare_version('1.8', self._version) >= 0: + stream = True + + if dockerfile and utils.compare_version('1.17', self._version) < 0: + raise errors.InvalidVersion( + 'dockerfile was only introduced in API version 1.17' + ) + + if utils.compare_version('1.19', self._version) < 0: + pull = 1 if pull else 0 + + u = self._url('/build') + params = { + 't': tag, + 'remote': remote, + 'q': quiet, + 'nocache': nocache, + 'rm': rm, + 'forcerm': forcerm, + 'pull': pull, + 'dockerfile': dockerfile, + } + params.update(container_limits) + + if buildargs: + if utils.version_gte(self._version, '1.21'): + 
params.update({'buildargs': json.dumps(buildargs)}) + else: + raise errors.InvalidVersion( + 'buildargs was only introduced in API version 1.21' + ) + + if context is not None: + headers = {'Content-Type': 'application/tar'} + if encoding: + headers['Content-Encoding'] = encoding + + if utils.compare_version('1.9', self._version) >= 0: + self._set_auth_headers(headers) + + response = self._post( + u, + data=context, + params=params, + headers=headers, + stream=stream, + timeout=timeout, + ) + + if context is not None and not custom_context: + context.close() + + if stream: + return self._stream_helper(response, decode=decode) + else: + output = self._result(response) + srch = r'Successfully built ([0-9a-f]+)' + match = re.search(srch, output) + if not match: + return None, output + return match.group(1), output + + def _set_auth_headers(self, headers): + log.debug('Looking for auth config') + + # If we don't have any auth data so far, try reloading the config + # file one more time in case anything showed up in there. + if not self._auth_configs: + log.debug("No auth config in memory - loading from filesystem") + self._auth_configs = auth.load_config() + + # Send the full auth configuration (if any exists), since the build + # could use any (or all) of the registries. 
+ if self._auth_configs: + log.debug( + 'Sending auth config ({0})'.format( + ', '.join(repr(k) for k in self._auth_configs.keys()) + ) + ) + if headers is None: + headers = {} + if utils.compare_version('1.19', self._version) >= 0: + headers['X-Registry-Config'] = auth.encode_header( + self._auth_configs + ) + else: + headers['X-Registry-Config'] = auth.encode_header({ + 'configs': self._auth_configs + }) + else: + log.debug('No auth config found') diff --git a/testbed/docker__docker-py/docker/api/container.py b/testbed/docker__docker-py/docker/api/container.py new file mode 100644 index 0000000000000000000000000000000000000000..ef17c27c17ad4db071bd4e61fbe428d2cbf040a8 --- /dev/null +++ b/testbed/docker__docker-py/docker/api/container.py @@ -0,0 +1,442 @@ +import six +import warnings +from datetime import datetime + +from .. import errors +from .. import utils +from ..utils.utils import create_networking_config, create_endpoint_config + + +class ContainerApiMixin(object): + @utils.check_resource + def attach(self, container, stdout=True, stderr=True, + stream=False, logs=False): + params = { + 'logs': logs and 1 or 0, + 'stdout': stdout and 1 or 0, + 'stderr': stderr and 1 or 0, + 'stream': stream and 1 or 0, + } + u = self._url("/containers/{0}/attach", container) + response = self._post(u, params=params, stream=stream) + + return self._get_result(container, stream, response) + + @utils.check_resource + def attach_socket(self, container, params=None, ws=False): + if params is None: + params = { + 'stdout': 1, + 'stderr': 1, + 'stream': 1 + } + + if ws: + return self._attach_websocket(container, params) + + u = self._url("/containers/{0}/attach", container) + return self._get_raw_response_socket(self.post( + u, None, params=self._attach_params(params), stream=True)) + + @utils.check_resource + def commit(self, container, repository=None, tag=None, message=None, + author=None, conf=None): + params = { + 'container': container, + 'repo': repository, + 'tag': tag, + 
'comment': message, + 'author': author + } + u = self._url("/commit") + return self._result(self._post_json(u, data=conf, params=params), + json=True) + + def containers(self, quiet=False, all=False, trunc=False, latest=False, + since=None, before=None, limit=-1, size=False, + filters=None): + params = { + 'limit': 1 if latest else limit, + 'all': 1 if all else 0, + 'size': 1 if size else 0, + 'trunc_cmd': 1 if trunc else 0, + 'since': since, + 'before': before + } + if filters: + params['filters'] = utils.convert_filters(filters) + u = self._url("/containers/json") + res = self._result(self._get(u, params=params), True) + + if quiet: + return [{'Id': x['Id']} for x in res] + if trunc: + for x in res: + x['Id'] = x['Id'][:12] + return res + + @utils.check_resource + def copy(self, container, resource): + if utils.version_gte(self._version, '1.20'): + warnings.warn( + 'Client.copy() is deprecated for API version >= 1.20, ' + 'please use get_archive() instead', + DeprecationWarning + ) + res = self._post_json( + self._url("/containers/{0}/copy".format(container)), + data={"Resource": resource}, + stream=True + ) + self._raise_for_status(res) + return res.raw + + def create_container(self, image, command=None, hostname=None, user=None, + detach=False, stdin_open=False, tty=False, + mem_limit=None, ports=None, environment=None, + dns=None, volumes=None, volumes_from=None, + network_disabled=False, name=None, entrypoint=None, + cpu_shares=None, working_dir=None, domainname=None, + memswap_limit=None, cpuset=None, host_config=None, + mac_address=None, labels=None, volume_driver=None, + stop_signal=None, networking_config=None): + + if isinstance(volumes, six.string_types): + volumes = [volumes, ] + + if host_config and utils.compare_version('1.15', self._version) < 0: + raise errors.InvalidVersion( + 'host_config is not supported in API < 1.15' + ) + + config = self.create_container_config( + image, command, hostname, user, detach, stdin_open, + tty, mem_limit, ports, 
environment, dns, volumes, volumes_from, + network_disabled, entrypoint, cpu_shares, working_dir, domainname, + memswap_limit, cpuset, host_config, mac_address, labels, + volume_driver, stop_signal, networking_config, + ) + return self.create_container_from_config(config, name) + + def create_container_config(self, *args, **kwargs): + return utils.create_container_config(self._version, *args, **kwargs) + + def create_container_from_config(self, config, name=None): + u = self._url("/containers/create") + params = { + 'name': name + } + res = self._post_json(u, data=config, params=params) + return self._result(res, True) + + def create_host_config(self, *args, **kwargs): + if not kwargs: + kwargs = {} + if 'version' in kwargs: + raise TypeError( + "create_host_config() got an unexpected " + "keyword argument 'version'" + ) + kwargs['version'] = self._version + return utils.create_host_config(*args, **kwargs) + + def create_networking_config(self, *args, **kwargs): + return create_networking_config(*args, **kwargs) + + def create_endpoint_config(self, *args, **kwargs): + return create_endpoint_config(self._version, *args, **kwargs) + + @utils.check_resource + def diff(self, container): + return self._result( + self._get(self._url("/containers/{0}/changes", container)), True + ) + + @utils.check_resource + def export(self, container): + res = self._get( + self._url("/containers/{0}/export", container), stream=True + ) + self._raise_for_status(res) + return res.raw + + @utils.check_resource + @utils.minimum_version('1.20') + def get_archive(self, container, path): + params = { + 'path': path + } + url = self._url('/containers/{0}/archive', container) + res = self._get(url, params=params, stream=True) + self._raise_for_status(res) + encoded_stat = res.headers.get('x-docker-container-path-stat') + return ( + res.raw, + utils.decode_json_header(encoded_stat) if encoded_stat else None + ) + + @utils.check_resource + def inspect_container(self, container): + return 
self._result( + self._get(self._url("/containers/{0}/json", container)), True + ) + + @utils.check_resource + def kill(self, container, signal=None): + url = self._url("/containers/{0}/kill", container) + params = {} + if signal is not None: + params['signal'] = signal + res = self._post(url, params=params) + + self._raise_for_status(res) + + @utils.check_resource + def logs(self, container, stdout=True, stderr=True, stream=False, + timestamps=False, tail='all', since=None, follow=None): + if utils.compare_version('1.11', self._version) >= 0: + if follow is None: + follow = stream + params = {'stderr': stderr and 1 or 0, + 'stdout': stdout and 1 or 0, + 'timestamps': timestamps and 1 or 0, + 'follow': follow and 1 or 0, + } + if utils.compare_version('1.13', self._version) >= 0: + if tail != 'all' and (not isinstance(tail, int) or tail < 0): + tail = 'all' + params['tail'] = tail + + if since is not None: + if utils.compare_version('1.19', self._version) < 0: + raise errors.InvalidVersion( + 'since is not supported in API < 1.19' + ) + else: + if isinstance(since, datetime): + params['since'] = utils.datetime_to_timestamp(since) + elif (isinstance(since, int) and since > 0): + params['since'] = since + url = self._url("/containers/{0}/logs", container) + res = self._get(url, params=params, stream=stream) + return self._get_result(container, stream, res) + return self.attach( + container, + stdout=stdout, + stderr=stderr, + stream=stream, + logs=True + ) + + @utils.check_resource + def pause(self, container): + url = self._url('/containers/{0}/pause', container) + res = self._post(url) + self._raise_for_status(res) + + @utils.check_resource + def port(self, container, private_port): + res = self._get(self._url("/containers/{0}/json", container)) + self._raise_for_status(res) + json_ = res.json() + private_port = str(private_port) + h_ports = None + + # Port settings is None when the container is running with + # network_mode=host. 
+ port_settings = json_.get('NetworkSettings', {}).get('Ports') + if port_settings is None: + return None + + if '/' in private_port: + return port_settings.get(private_port) + + h_ports = port_settings.get(private_port + '/tcp') + if h_ports is None: + h_ports = port_settings.get(private_port + '/udp') + + return h_ports + + @utils.check_resource + @utils.minimum_version('1.20') + def put_archive(self, container, path, data): + params = {'path': path} + url = self._url('/containers/{0}/archive', container) + res = self._put(url, params=params, data=data) + self._raise_for_status(res) + return res.status_code == 200 + + @utils.check_resource + def remove_container(self, container, v=False, link=False, force=False): + params = {'v': v, 'link': link, 'force': force} + res = self._delete( + self._url("/containers/{0}", container), params=params + ) + self._raise_for_status(res) + + @utils.minimum_version('1.17') + @utils.check_resource + def rename(self, container, name): + url = self._url("/containers/{0}/rename", container) + params = {'name': name} + res = self._post(url, params=params) + self._raise_for_status(res) + + @utils.check_resource + def resize(self, container, height, width): + params = {'h': height, 'w': width} + url = self._url("/containers/{0}/resize", container) + res = self._post(url, params=params) + self._raise_for_status(res) + + @utils.check_resource + def restart(self, container, timeout=10): + params = {'t': timeout} + url = self._url("/containers/{0}/restart", container) + res = self._post(url, params=params) + self._raise_for_status(res) + + @utils.check_resource + def start(self, container, binds=None, port_bindings=None, lxc_conf=None, + publish_all_ports=None, links=None, privileged=None, + dns=None, dns_search=None, volumes_from=None, network_mode=None, + restart_policy=None, cap_add=None, cap_drop=None, devices=None, + extra_hosts=None, read_only=None, pid_mode=None, ipc_mode=None, + security_opt=None, ulimits=None): + + if 
utils.compare_version('1.10', self._version) < 0: + if dns is not None: + raise errors.InvalidVersion( + 'dns is only supported for API version >= 1.10' + ) + if volumes_from is not None: + raise errors.InvalidVersion( + 'volumes_from is only supported for API version >= 1.10' + ) + + if utils.compare_version('1.15', self._version) < 0: + if security_opt is not None: + raise errors.InvalidVersion( + 'security_opt is only supported for API version >= 1.15' + ) + if ipc_mode: + raise errors.InvalidVersion( + 'ipc_mode is only supported for API version >= 1.15' + ) + + if utils.compare_version('1.17', self._version) < 0: + if read_only is not None: + raise errors.InvalidVersion( + 'read_only is only supported for API version >= 1.17' + ) + if pid_mode is not None: + raise errors.InvalidVersion( + 'pid_mode is only supported for API version >= 1.17' + ) + + if utils.compare_version('1.18', self._version) < 0: + if ulimits is not None: + raise errors.InvalidVersion( + 'ulimits is only supported for API version >= 1.18' + ) + + start_config_kwargs = dict( + binds=binds, port_bindings=port_bindings, lxc_conf=lxc_conf, + publish_all_ports=publish_all_ports, links=links, dns=dns, + privileged=privileged, dns_search=dns_search, cap_add=cap_add, + cap_drop=cap_drop, volumes_from=volumes_from, devices=devices, + network_mode=network_mode, restart_policy=restart_policy, + extra_hosts=extra_hosts, read_only=read_only, pid_mode=pid_mode, + ipc_mode=ipc_mode, security_opt=security_opt, ulimits=ulimits + ) + start_config = None + + if any(v is not None for v in start_config_kwargs.values()): + if utils.compare_version('1.15', self._version) > 0: + warnings.warn( + 'Passing host config parameters in start() is deprecated. 
' + 'Please use host_config in create_container instead!', + DeprecationWarning + ) + start_config = self.create_host_config(**start_config_kwargs) + + url = self._url("/containers/{0}/start", container) + res = self._post_json(url, data=start_config) + self._raise_for_status(res) + + @utils.minimum_version('1.17') + @utils.check_resource + def stats(self, container, decode=None, stream=True): + url = self._url("/containers/{0}/stats", container) + if stream: + return self._stream_helper(self._get(url, stream=True), + decode=decode) + else: + return self._result(self._get(url, params={'stream': False}), + json=True) + + @utils.check_resource + def stop(self, container, timeout=10): + params = {'t': timeout} + url = self._url("/containers/{0}/stop", container) + + res = self._post(url, params=params, + timeout=(timeout + (self.timeout or 0))) + self._raise_for_status(res) + + @utils.check_resource + def top(self, container, ps_args=None): + u = self._url("/containers/{0}/top", container) + params = {} + if ps_args is not None: + params['ps_args'] = ps_args + return self._result(self._get(u, params=params), True) + + @utils.check_resource + def unpause(self, container): + url = self._url('/containers/{0}/unpause', container) + res = self._post(url) + self._raise_for_status(res) + + @utils.minimum_version('1.22') + @utils.check_resource + def update_container( + self, container, blkio_weight=None, cpu_period=None, cpu_quota=None, + cpu_shares=None, cpuset_cpus=None, cpuset_mems=None, mem_limit=None, + mem_reservation=None, memswap_limit=None, kernel_memory=None + ): + url = self._url('/containers/{0}/update', container) + data = {} + if blkio_weight: + data['BlkioWeight'] = blkio_weight + if cpu_period: + data['CpuPeriod'] = cpu_period + if cpu_shares: + data['CpuShares'] = cpu_shares + if cpu_quota: + data['CpuQuota'] = cpu_quota + if cpuset_cpus: + data['CpusetCpus'] = cpuset_cpus + if cpuset_mems: + data['CpusetMems'] = cpuset_mems + if mem_limit: + data['Memory'] 
= utils.parse_bytes(mem_limit) + if mem_reservation: + data['MemoryReservation'] = utils.parse_bytes(mem_reservation) + if memswap_limit: + data['MemorySwap'] = utils.parse_bytes(memswap_limit) + if kernel_memory: + data['KernelMemory'] = utils.parse_bytes(kernel_memory) + + res = self._post_json(url, data=data) + return self._result(res, True) + + @utils.check_resource + def wait(self, container, timeout=None): + url = self._url("/containers/{0}/wait", container) + res = self._post(url, timeout=timeout) + self._raise_for_status(res) + json_ = res.json() + if 'StatusCode' in json_: + return json_['StatusCode'] + return -1 diff --git a/testbed/docker__docker-py/docker/api/daemon.py b/testbed/docker__docker-py/docker/api/daemon.py new file mode 100644 index 0000000000000000000000000000000000000000..a149e5e3720b53b8e82975576f8e1de984353720 --- /dev/null +++ b/testbed/docker__docker-py/docker/api/daemon.py @@ -0,0 +1,78 @@ +import os +import warnings +from datetime import datetime + +from ..auth import auth +from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING +from ..utils import utils + + +class DaemonApiMixin(object): + def events(self, since=None, until=None, filters=None, decode=None): + if isinstance(since, datetime): + since = utils.datetime_to_timestamp(since) + + if isinstance(until, datetime): + until = utils.datetime_to_timestamp(until) + + if filters: + filters = utils.convert_filters(filters) + + params = { + 'since': since, + 'until': until, + 'filters': filters + } + + return self._stream_helper( + self.get(self._url('/events'), params=params, stream=True), + decode=decode + ) + + def info(self): + return self._result(self._get(self._url("/info")), True) + + def login(self, username, password=None, email=None, registry=None, + reauth=False, insecure_registry=False, dockercfg_path=None): + if insecure_registry: + warnings.warn( + INSECURE_REGISTRY_DEPRECATION_WARNING.format('login()'), + DeprecationWarning + ) + + # If we don't have any auth data 
so far, try reloading the config file + # one more time in case anything showed up in there. + # If dockercfg_path is passed check to see if the config file exists, + # if so load that config. + if dockercfg_path and os.path.exists(dockercfg_path): + self._auth_configs = auth.load_config(dockercfg_path) + elif not self._auth_configs: + self._auth_configs = auth.load_config() + + registry = registry or auth.INDEX_URL + + authcfg = auth.resolve_authconfig(self._auth_configs, registry) + # If we found an existing auth config for this registry and username + # combination, we can return it immediately unless reauth is requested. + if authcfg and authcfg.get('username', None) == username \ + and not reauth: + return authcfg + + req_data = { + 'username': username, + 'password': password, + 'email': email, + 'serveraddress': registry, + } + + response = self._post_json(self._url('/auth'), data=req_data) + if response.status_code == 200: + self._auth_configs[registry] = req_data + return self._result(response, json=True) + + def ping(self): + return self._result(self._get(self._url('/_ping'))) + + def version(self, api_version=True): + url = self._url("/version", versioned_api=api_version) + return self._result(self._get(url), json=True) diff --git a/testbed/docker__docker-py/docker/api/exec_api.py b/testbed/docker__docker-py/docker/api/exec_api.py new file mode 100644 index 0000000000000000000000000000000000000000..f0e4afa6f9dfdc8f525df3dd53829fb9321fcfb2 --- /dev/null +++ b/testbed/docker__docker-py/docker/api/exec_api.py @@ -0,0 +1,75 @@ +import six + +from .. import errors +from .. 
import utils + + +class ExecApiMixin(object): + @utils.minimum_version('1.15') + @utils.check_resource + def exec_create(self, container, cmd, stdout=True, stderr=True, + stdin=False, tty=False, privileged=False, user=''): + if privileged and utils.compare_version('1.19', self._version) < 0: + raise errors.InvalidVersion( + 'Privileged exec is not supported in API < 1.19' + ) + if user and utils.compare_version('1.19', self._version) < 0: + raise errors.InvalidVersion( + 'User-specific exec is not supported in API < 1.19' + ) + if isinstance(cmd, six.string_types): + cmd = utils.split_command(cmd) + + data = { + 'Container': container, + 'User': user, + 'Privileged': privileged, + 'Tty': tty, + 'AttachStdin': stdin, + 'AttachStdout': stdout, + 'AttachStderr': stderr, + 'Cmd': cmd + } + + url = self._url('/containers/{0}/exec', container) + res = self._post_json(url, data=data) + return self._result(res, True) + + @utils.minimum_version('1.16') + def exec_inspect(self, exec_id): + if isinstance(exec_id, dict): + exec_id = exec_id.get('Id') + res = self._get(self._url("/exec/{0}/json", exec_id)) + return self._result(res, True) + + @utils.minimum_version('1.15') + def exec_resize(self, exec_id, height=None, width=None): + if isinstance(exec_id, dict): + exec_id = exec_id.get('Id') + + params = {'h': height, 'w': width} + url = self._url("/exec/{0}/resize", exec_id) + res = self._post(url, params=params) + self._raise_for_status(res) + + @utils.minimum_version('1.15') + def exec_start(self, exec_id, detach=False, tty=False, stream=False, + socket=False): + # we want opened socket if socket == True + if socket: + stream = True + if isinstance(exec_id, dict): + exec_id = exec_id.get('Id') + + data = { + 'Tty': tty, + 'Detach': detach + } + + res = self._post_json( + self._url('/exec/{0}/start', exec_id), data=data, stream=stream + ) + + if socket: + return self._get_raw_response_socket(res) + return self._get_result_tty(stream, res, tty) diff --git 
a/testbed/docker__docker-py/docker/api/image.py b/testbed/docker__docker-py/docker/api/image.py new file mode 100644 index 0000000000000000000000000000000000000000..3e66347e42e8afc98609402295fdb71835b9aaf2 --- /dev/null +++ b/testbed/docker__docker-py/docker/api/image.py @@ -0,0 +1,270 @@ +import logging +import six +import warnings + +from ..auth import auth +from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING +from .. import utils +from .. import errors + +log = logging.getLogger(__name__) + + +class ImageApiMixin(object): + + @utils.check_resource + def get_image(self, image): + res = self._get(self._url("/images/{0}/get", image), stream=True) + self._raise_for_status(res) + return res.raw + + @utils.check_resource + def history(self, image): + res = self._get(self._url("/images/{0}/history", image)) + return self._result(res, True) + + def images(self, name=None, quiet=False, all=False, viz=False, + filters=None): + if viz: + if utils.compare_version('1.7', self._version) >= 0: + raise Exception('Viz output is not supported in API >= 1.7!') + return self._result(self._get(self._url("images/viz"))) + params = { + 'filter': name, + 'only_ids': 1 if quiet else 0, + 'all': 1 if all else 0, + } + if filters: + params['filters'] = utils.convert_filters(filters) + res = self._result(self._get(self._url("/images/json"), params=params), + True) + if quiet: + return [x['Id'] for x in res] + return res + + def import_image(self, src=None, repository=None, tag=None, image=None): + if src: + if isinstance(src, six.string_types): + try: + result = self.import_image_from_file( + src, repository=repository, tag=tag) + except IOError: + result = self.import_image_from_url( + src, repository=repository, tag=tag) + else: + result = self.import_image_from_data( + src, repository=repository, tag=tag) + elif image: + result = self.import_image_from_image( + image, repository=repository, tag=tag) + else: + raise Exception("Must specify a src or image") + + return result + + 
def import_image_from_url(self, url, repository=None, tag=None):
    """Import an image by having the daemon download a tarball from ``url``.

    Optionally tags the result as ``repository:tag``. Returns the decoded
    response from POST /images/create.
    """
    endpoint = self._url("/images/create")
    query = {'fromSrc': url, 'repo': repository, 'tag': tag}
    response = self._post(endpoint, data=None, params=query)
    return self._result(response)
def load_image(self, data):
    """Upload a tarball of images (as produced by ``docker save``) to the
    daemon via POST /images/load. Returns None; raises on HTTP error."""
    url = self._url("/images/load")
    response = self._post(url, data=data)
    self._raise_for_status(response)
+ if authcfg: + log.debug('Found auth config') + # auth_config needs to be a dict in the format used by + # auth.py username , password, serveraddress, email + headers['X-Registry-Auth'] = auth.encode_header( + authcfg + ) + else: + log.debug('No auth config found') + else: + log.debug('Sending supplied auth config') + headers['X-Registry-Auth'] = auth.encode_header(auth_config) + + response = self._post( + self._url('/images/create'), params=params, headers=headers, + stream=stream, timeout=None + ) + + self._raise_for_status(response) + + if stream: + return self._stream_helper(response, decode=decode) + + return self._result(response) + + def push(self, repository, tag=None, stream=False, + insecure_registry=False, decode=False): + if insecure_registry: + warnings.warn( + INSECURE_REGISTRY_DEPRECATION_WARNING.format('push()'), + DeprecationWarning + ) + + if not tag: + repository, tag = utils.parse_repository_tag(repository) + registry, repo_name = auth.resolve_repository_name(repository) + u = self._url("/images/{0}/push", repository) + params = { + 'tag': tag + } + headers = {} + + if utils.compare_version('1.5', self._version) >= 0: + # If we don't have any auth data so far, try reloading the config + # file one more time in case anything showed up in there. + if not self._auth_configs: + self._auth_configs = auth.load_config() + authcfg = auth.resolve_authconfig(self._auth_configs, registry) + + # Do not fail here if no authentication exists for this specific + # registry as we can have a readonly pull. Just put the header if + # we can. 
def search(self, term):
    """Search the registry index for images matching ``term`` and return
    the decoded JSON result list."""
    url = self._url("/images/search")
    response = self._get(url, params={'term': term})
    return self._result(response, True)
@check_resource
@minimum_version('1.21')
def connect_container_to_network(self, container, net_id,
                                 ipv4_address=None, ipv6_address=None,
                                 aliases=None, links=None):
    """Attach ``container`` to the network ``net_id``.

    aliases: extra names the container answers to on this network.
    links: legacy container links; normalized via ``normalize_links``.
    ipv4_address / ipv6_address: static addresses; only allowed when the
        negotiated API version is >= 1.22, otherwise InvalidVersion is
        raised.
    """
    data = {
        "Container": container,
        "EndpointConfig": {
            "Aliases": aliases,
            "Links": normalize_links(links) if links else None,
        },
    }

    # IPv4 or IPv6 or neither:
    if ipv4_address or ipv6_address:
        # Static IP assignment was only added to the remote API in 1.22.
        if version_lt(self._version, '1.22'):
            raise InvalidVersion('IP address assignment is not '
                                 'supported in API version < 1.22')

        # Only include IPAMConfig when an address was actually requested,
        # so older daemons never see the key at all.
        data['EndpointConfig']['IPAMConfig'] = dict()
        if ipv4_address:
            data['EndpointConfig']['IPAMConfig']['IPv4Address'] = \
                ipv4_address
        if ipv6_address:
            data['EndpointConfig']['IPAMConfig']['IPv6Address'] = \
                ipv6_address

    url = self._url("/networks/{0}/connect", net_id)
    res = self._post_json(url, data=data)
    self._raise_for_status(res)
class VolumeApiMixin(object):
    """Volume-related endpoints of the remote API (all require API >= 1.21,
    enforced by the ``minimum_version`` decorator)."""

    @utils.minimum_version('1.21')
    def volumes(self, filters=None):
        """List volumes, optionally narrowed by a ``filters`` dict."""
        encoded_filters = utils.convert_filters(filters) if filters else None
        url = self._url('/volumes')
        resp = self._get(url, params={'filters': encoded_filters})
        return self._result(resp, True)

    @utils.minimum_version('1.21')
    def create_volume(self, name, driver=None, driver_opts=None):
        """Create a named volume; ``driver_opts`` must be a dict if given."""
        url = self._url('/volumes/create')
        if driver_opts is not None and not isinstance(driver_opts, dict):
            raise TypeError('driver_opts must be a dictionary')

        payload = {
            'Name': name,
            'Driver': driver,
            'DriverOpts': driver_opts,
        }
        return self._result(self._post_json(url, data=payload), True)

    @utils.minimum_version('1.21')
    def inspect_volume(self, name):
        """Return low-level details for a single volume as decoded JSON."""
        return self._result(self._get(self._url('/volumes/{0}', name)), True)

    @utils.minimum_version('1.21')
    def remove_volume(self, name):
        """Delete a volume; raises APIError via _raise_for_status on failure."""
        self._raise_for_status(self._delete(self._url('/volumes/{0}', name)))
def split_repo_name(repo_name):
    """Split ``repo_name`` into an (index, remote name) pair.

    The part before the first '/' is treated as a registry host only when
    it contains a '.' or ':' or is exactly 'localhost'; otherwise the whole
    name is a repo on the default public index.
    """
    head, sep, tail = repo_name.partition('/')
    looks_like_host = '.' in head or ':' in head or head == 'localhost'
    if sep and looks_like_host:
        return head, tail
    # This is a docker index repo (ex: username/foobar or ubuntu)
    return INDEX_NAME, repo_name
+ """ + # Default to the public index server + registry = resolve_index_name(registry) if registry else INDEX_NAME + log.debug("Looking for auth entry for {0}".format(repr(registry))) + + if registry in authconfig: + log.debug("Found {0}".format(repr(registry))) + return authconfig[registry] + + for key, config in six.iteritems(authconfig): + if resolve_index_name(key) == registry: + log.debug("Found {0}".format(repr(key))) + return config + + log.debug("No entry found") + return None + + +def convert_to_hostname(url): + return url.replace('http://', '').replace('https://', '').split('/', 1)[0] + + +def decode_auth(auth): + if isinstance(auth, six.string_types): + auth = auth.encode('ascii') + s = base64.b64decode(auth) + login, pwd = s.split(b':', 1) + return login.decode('utf8'), pwd.decode('utf8') + + +def encode_header(auth): + auth_json = json.dumps(auth).encode('ascii') + return base64.urlsafe_b64encode(auth_json) + + +def parse_auth(entries, raise_on_error=False): + """ + Parses authentication entries + + Args: + entries: Dict of authentication entries. + raise_on_error: If set to true, an invalid format will raise + InvalidConfigFile + + Returns: + Authentication registry. + """ + + conf = {} + for registry, entry in six.iteritems(entries): + if not (isinstance(entry, dict) and 'auth' in entry): + log.debug( + 'Config entry for key {0} is not auth config'.format(registry) + ) + # We sometimes fall back to parsing the whole config as if it was + # the auth config by itself, for legacy purposes. In that case, we + # fail silently and return an empty conf if any of the keys is not + # formatted properly. 
def load_config(config_path=None):
    """Load authentication data from a Docker configuration file.

    Lookup priority:
        explicit config_path parameter > DOCKER_CONFIG environment variable >
        ~/.docker/config.json > ~/.dockercfg

    Tries the modern JSON format first ('auths' / 'HttpHeaders' sections,
    then the whole file as a bare auth dict), and finally the legacy
    ``key = value`` .dockercfg format. Returns {} when nothing parses.
    """
    config_file = find_config_file(config_path)

    if not config_file:
        log.debug("File doesn't exist")
        return {}

    try:
        with open(config_file) as f:
            data = json.load(f)
            res = {}
            if data.get('auths'):
                log.debug("Found 'auths' section")
                res.update(parse_auth(data['auths'], raise_on_error=True))
            if data.get('HttpHeaders'):
                log.debug("Found 'HttpHeaders' section")
                res.update({'HttpHeaders': data['HttpHeaders']})
            if res:
                return res
            else:
                # Valid JSON but no recognized sections: re-parse the whole
                # document as if it were the auth dict itself.
                log.debug("Couldn't find 'auths' or 'HttpHeaders' sections")
                f.seek(0)
                return parse_auth(json.load(f))
    except (IOError, KeyError, ValueError) as e:
        # Likely missing new Docker config file or it's in an
        # unknown format, continue to attempt to read old location
        # and format.
        log.debug(e)

    log.debug("Attempting to parse legacy auth file format")
    try:
        data = []
        with open(config_file) as f:
            # Legacy format: one 'key = value' per line; keep only values.
            for line in f.readlines():
                data.append(line.strip().split(' = ')[1])
            if len(data) < 2:
                # Not enough data
                raise errors.InvalidConfigFile(
                    'Invalid or empty configuration file!'
                )

        username, password = decode_auth(data[0])
        return {
            INDEX_NAME: {
                'username': username,
                'password': password,
                'email': data[1],
                'serveraddress': INDEX_URL,
            }
        }
    except Exception as e:
        log.debug(e)
        pass

    log.debug("All parsing attempts failed - returning empty config")
    return {}
def __init__(self, base_url=None, version=None,
             timeout=constants.DEFAULT_TIMEOUT_SECONDS, tls=False):
    """Set up the transport (unix socket or TCP, optionally TLS) and pick
    the remote API version.

    base_url: daemon address; required when ``tls`` is used.
    version: API version string, 'auto' to probe the daemon, or None for
        the library default.
    timeout: default per-request timeout in seconds.
    tls: True for default TLS, or a TLSConfig instance for fine control.

    Raises TLSParameterError / DockerException on invalid arguments.
    """
    super(Client, self).__init__()

    if tls and not base_url:
        raise errors.TLSParameterError(
            'If using TLS, the base_url argument must be provided.'
        )

    self.base_url = base_url
    self.timeout = timeout

    self._auth_configs = auth.load_config()

    base_url = utils.parse_host(base_url, sys.platform, tls=bool(tls))
    if base_url.startswith('http+unix://'):
        # Unix socket: route all http+docker:// traffic through the
        # UnixAdapter and use a placeholder hostname.
        self._custom_adapter = unixconn.UnixAdapter(base_url, timeout)
        self.mount('http+docker://', self._custom_adapter)
        self.base_url = 'http+docker://localunixsocket'
    else:
        # Use SSLAdapter for the ability to specify SSL version
        if isinstance(tls, TLSConfig):
            tls.configure_client(self)
        elif tls:
            self._custom_adapter = ssladapter.SSLAdapter()
            self.mount('https://', self._custom_adapter)
        self.base_url = base_url

    # version detection needs to be after unix adapter mounting
    if version is None:
        self._version = constants.DEFAULT_DOCKER_API_VERSION
    elif isinstance(version, six.string_types):
        if version.lower() == 'auto':
            # Ask the daemon which API version it speaks.
            self._version = self._retrieve_server_version()
        else:
            self._version = version
    else:
        raise errors.DockerException(
            'Version parameter must be a string or None. Found {0}'.format(
                type(version).__name__
            )
        )
def _result(self, response, json=False, binary=False):
    """Raise on HTTP errors, then return the response body.

    Exactly one decoding applies: parsed JSON when ``json`` is true, raw
    bytes when ``binary`` is true, decoded text otherwise. ``json`` and
    ``binary`` are mutually exclusive.
    """
    assert not (json and binary)
    self._raise_for_status(response)
    if json:
        return response.json()
    return response.content if binary else response.text
def _attach_params(self, override=None):
    """Query params for attach endpoints; any truthy ``override`` wins,
    otherwise all three streams are enabled."""
    defaults = {
        'stdout': 1,
        'stderr': 1,
        'stream': 1
    }
    return override if override else defaults
def _stream_helper(self, response, decode=False):
    """Generator for data coming from a chunked-encoded HTTP response.

    Yields one transfer chunk at a time; with ``decode=True`` each chunk
    is parsed as a JSON object before being yielded.
    """
    if response.raw._fp.chunked:
        reader = response.raw
        while not reader.closed:
            # this read call will block until we get a chunk
            data = reader.read(1)
            if not data:
                break
            # Drain the remainder of the current chunk in one read so the
            # chunk is yielded as a unit rather than byte-by-byte.
            if reader._fp.chunk_left:
                data += reader.read(reader._fp.chunk_left)
            if decode:
                if six.PY3:
                    # json.loads needs str, not bytes, on Python 3.
                    data = data.decode('utf-8')
                data = json.loads(data)
            yield data
    else:
        # Response isn't chunked, meaning we probably
        # encountered an error immediately
        yield self._result(response)
def _disable_socket_timeout(self, socket):
    """Clear read timeouts on ``socket`` and, when present, its wrapped
    ``_sock``.

    Depending on the combination of python version and whether we're
    connecting over http or https, the real socket may be the object
    itself or hide behind ``_sock``, and either may or may not expose
    ``settimeout`` -- so try both to avoid missing the right one.
    """
    for target in (socket, getattr(socket, "_sock", None)):
        if target is not None and hasattr(target, "settimeout"):
            target.settimeout(None)
# Remote API version requested when the caller doesn't pin one explicitly.
DEFAULT_DOCKER_API_VERSION = '1.21'
# Default per-request timeout (seconds) for HTTP calls to the daemon.
DEFAULT_TIMEOUT_SECONDS = 60
# Size of the header prefixing each frame of a multiplexed stdout/stderr
# stream: 1 stream-id byte + 3 padding bytes + 4-byte big-endian length
# (matches the '>BxxxL' struct format used when demultiplexing).
STREAM_HEADER_SIZE_BYTES = 8
# Accepted keys of the container-limits dict -- presumably validated by the
# build API mixin (not visible in this file chunk; confirm against build()).
CONTAINER_LIMITS_KEYS = [
    'memory', 'memswap', 'cpushares', 'cpusetcpus'
]

# Warning emitted when callers pass the defunct `insecure_registry` flag;
# '{}' is filled with the calling method's name (e.g. 'pull()').
INSECURE_REGISTRY_DEPRECATION_WARNING = \
    'The `insecure_registry` argument to {} ' \
    'is deprecated and non-functional. Please remove it.'
class APIError(requests.exceptions.HTTPError):
    """HTTP error returned by the Docker daemon, enriched with the
    response body as an ``explanation``."""

    def __init__(self, message, response, explanation=None):
        # requests 1.2 supports response as a keyword argument, but
        # requests 1.1 doesn't
        super(APIError, self).__init__(message)
        self.response = response
        self.explanation = explanation

        # Fall back to the raw response body as the explanation.
        if self.explanation is None and response.content:
            self.explanation = response.content.strip()

    def __str__(self):
        if self.is_client_error():
            message = '{0} Client Error: {1}'.format(
                self.response.status_code, self.response.reason)
        elif self.is_server_error():
            message = '{0} Server Error: {1}'.format(
                self.response.status_code, self.response.reason)
        else:
            message = super(APIError, self).__str__()

        if self.explanation:
            message = '{0} ("{1}")'.format(message, self.explanation)

        return message

    def is_client_error(self):
        """True for 4xx status codes."""
        return 400 <= self.response.status_code < 500

    def is_server_error(self):
        """True for 5xx status codes."""
        return 500 <= self.response.status_code < 600
See " + "https://docs.docker.com/engine/articles/https/ " + "for API details.") + + +class NullResource(DockerException, ValueError): + pass diff --git a/testbed/docker__docker-py/docker/ssladapter/__init__.py b/testbed/docker__docker-py/docker/ssladapter/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1a5e1bb6d420a8ff149ffd1acf95d734d01b478e --- /dev/null +++ b/testbed/docker__docker-py/docker/ssladapter/__init__.py @@ -0,0 +1 @@ +from .ssladapter import SSLAdapter # flake8: noqa diff --git a/testbed/docker__docker-py/docker/ssladapter/ssladapter.py b/testbed/docker__docker-py/docker/ssladapter/ssladapter.py new file mode 100644 index 0000000000000000000000000000000000000000..5b43aa2ed998d2ddac926911c01beb8cabc7966f --- /dev/null +++ b/testbed/docker__docker-py/docker/ssladapter/ssladapter.py @@ -0,0 +1,57 @@ +""" Resolves OpenSSL issues in some servers: + https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/ + https://github.com/kennethreitz/requests/pull/799 +""" +from distutils.version import StrictVersion +from requests.adapters import HTTPAdapter + +try: + import requests.packages.urllib3 as urllib3 +except ImportError: + import urllib3 + +PoolManager = urllib3.poolmanager.PoolManager + + +class SSLAdapter(HTTPAdapter): + '''An HTTPS Transport Adapter that uses an arbitrary SSL version.''' + def __init__(self, ssl_version=None, assert_hostname=None, + assert_fingerprint=None, **kwargs): + self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + super(SSLAdapter, self).__init__(**kwargs) + + def init_poolmanager(self, connections, maxsize, block=False): + kwargs = { + 'num_pools': connections, + 'maxsize': maxsize, + 'block': block, + 'assert_hostname': self.assert_hostname, + 'assert_fingerprint': self.assert_fingerprint, + } + if self.ssl_version and self.can_override_ssl_version(): + kwargs['ssl_version'] = self.ssl_version + + self.poolmanager = 
PoolManager(**kwargs) + + def get_connection(self, *args, **kwargs): + """ + Ensure assert_hostname is set correctly on our pool + + We already take care of a normal poolmanager via init_poolmanager + + But we still need to take care of when there is a proxy poolmanager + """ + conn = super(SSLAdapter, self).get_connection(*args, **kwargs) + if conn.assert_hostname != self.assert_hostname: + conn.assert_hostname = self.assert_hostname + return conn + + def can_override_ssl_version(self): + urllib_ver = urllib3.__version__.split('-')[0] + if urllib_ver is None: + return False + if urllib_ver == 'dev': + return True + return StrictVersion(urllib_ver) > StrictVersion('1.5') diff --git a/testbed/docker__docker-py/docker/tls.py b/testbed/docker__docker-py/docker/tls.py new file mode 100644 index 0000000000000000000000000000000000000000..7abfa60e1d493037739b3424c6fc9de0f3492315 --- /dev/null +++ b/testbed/docker__docker-py/docker/tls.py @@ -0,0 +1,75 @@ +import os +import ssl + +from . import errors +from .ssladapter import ssladapter + + +class TLSConfig(object): + cert = None + ca_cert = None + verify = None + ssl_version = None + + def __init__(self, client_cert=None, ca_cert=None, verify=None, + ssl_version=None, assert_hostname=None, + assert_fingerprint=None): + # Argument compatibility/mapping with + # https://docs.docker.com/engine/articles/https/ + # This diverges from the Docker CLI in that users can specify 'tls' + # here, but also disable any public/default CA pool verification by + # leaving tls_verify=False + + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + # TLS v1.0 seems to be the safest default; SSLv23 fails in mysterious + # ways: https://github.com/docker/docker-py/issues/963 + + self.ssl_version = ssl_version or ssl.PROTOCOL_TLSv1 + + # "tls" and "tls_verify" must have both or neither cert/key files + # In either case, Alert the user when both are expected, but any are + # missing. 
+ + if client_cert: + try: + tls_cert, tls_key = client_cert + except ValueError: + raise errors.TLSParameterError( + 'client_config must be a tuple of' + ' (client certificate, key file)' + ) + + if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or + not os.path.isfile(tls_key)): + raise errors.TLSParameterError( + 'Path to a certificate and key files must be provided' + ' through the client_config param' + ) + self.cert = (tls_cert, tls_key) + + # If verify is set, make sure the cert exists + self.verify = verify + self.ca_cert = ca_cert + if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert): + raise errors.TLSParameterError( + 'Invalid CA certificate provided for `tls_ca_cert`.' + ) + + def configure_client(self, client): + client.ssl_version = self.ssl_version + + if self.verify and self.ca_cert: + client.verify = self.ca_cert + else: + client.verify = self.verify + + if self.cert: + client.cert = self.cert + + client.mount('https://', ssladapter.SSLAdapter( + ssl_version=self.ssl_version, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + )) diff --git a/testbed/docker__docker-py/docker/unixconn/__init__.py b/testbed/docker__docker-py/docker/unixconn/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..53711fc6d87b1e6a612d88a40a9bfbdec76d5fc0 --- /dev/null +++ b/testbed/docker__docker-py/docker/unixconn/__init__.py @@ -0,0 +1 @@ +from .unixconn import UnixAdapter # flake8: noqa diff --git a/testbed/docker__docker-py/docker/unixconn/unixconn.py b/testbed/docker__docker-py/docker/unixconn/unixconn.py new file mode 100644 index 0000000000000000000000000000000000000000..d7e249ef0e5ce97a926e5d0cd0968aa515037b7c --- /dev/null +++ b/testbed/docker__docker-py/docker/unixconn/unixconn.py @@ -0,0 +1,92 @@ +# Copyright 2013 dotCloud inc. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
class UnixHTTPConnection(httplib.HTTPConnection, object):
    """HTTP connection speaking over a local UNIX domain socket.

    The hostname is fixed to 'localhost' (HTTPConnection requires one
    for header generation); the real endpoint is the socket path given
    at construction time.
    """

    def __init__(self, base_url, unix_socket, timeout=60):
        httplib.HTTPConnection.__init__(self, 'localhost', timeout=timeout)
        self.base_url = base_url
        self.unix_socket = unix_socket
        self.timeout = timeout

    def connect(self):
        """Open the AF_UNIX stream socket this connection wraps."""
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        s.settimeout(self.timeout)
        s.connect(self.unix_socket)
        self.sock = s
    def request_url(self, request, proxies):
        """Return the URL to put on the request line, ignoring proxies."""
        # The select_proxy utility in requests errors out when the provided URL
        # doesn't have a hostname, like is the case when using a UNIX socket.
        # Since proxies are an irrelevant notion in the case of UNIX sockets
        # anyway, we simply return the path URL directly.
        # See also: https://github.com/docker/docker-py/issues/811
        return request.path_url
def update_headers(f):
    """Decorator: merge the client's configured ``HttpHeaders`` into
    the ``headers`` keyword argument of the wrapped API call.

    Configured headers are merged into (and override) any headers
    passed explicitly by the caller.
    """
    # functools.wraps preserves the wrapped function's name/docstring,
    # consistent with check_resource and minimum_version above.
    @functools.wraps(f)
    def inner(self, *args, **kwargs):
        if 'HttpHeaders' in self._auth_configs:
            if 'headers' not in kwargs:
                # NOTE(review): assigns the config dict itself (no
                # copy), matching the original behavior — callers must
                # not mutate it.
                kwargs['headers'] = self._auth_configs['HttpHeaders']
            else:
                kwargs['headers'].update(self._auth_configs['HttpHeaders'])
        return f(self, *args, **kwargs)
    return inner
def to_port_range(port):
    """Expand a port spec ('80', '80/udp', '8000-8002') into a list of
    individual port strings; return None for an empty spec.

    Raises ValueError for malformed protocol suffixes or ranges.
    """
    if not port:
        return None

    proto_suffix = ""
    if "/" in port:
        pieces = port.split("/")
        if len(pieces) != 2:
            _raise_invalid_port(port)
        port, proto = pieces
        proto_suffix = "/" + proto

    bounds = str(port).split('-')

    if len(bounds) == 1:
        # Single port.
        return ["%s%s" % (port, proto_suffix)]

    if len(bounds) == 2:
        # Inclusive start-end range.
        lo, hi = int(bounds[0]), int(bounds[1])
        return ["%s%s" % (p, proto_suffix) for p in range(lo, hi + 1)]

    raise ValueError('Invalid port range "%s", should be '
                     'port or startport-endport' % port)
class LogConfig(DictType):
    """Dict-backed container logging configuration.

    Accepts 'type'/'Type' and 'config'/'Config' keyword spellings and
    stores them under the remote API's 'Type' and 'Config' keys.
    """
    types = LogConfigTypesEnum

    def __init__(self, **kwargs):
        driver = kwargs.get('type', kwargs.get('Type'))
        cfg = kwargs.get('config', kwargs.get('Config')) or {}

        if cfg and not isinstance(cfg, dict):
            raise ValueError("LogConfig.config must be a dictionary")

        super(LogConfig, self).__init__({
            'Type': driver,
            'Config': cfg
        })

    @property
    def type(self):
        """Log driver name (the 'Type' key)."""
        return self['Type']

    @type.setter
    def type(self, value):
        self['Type'] = value

    @property
    def config(self):
        """Driver options dict (the 'Config' key)."""
        return self['Config']

    def set_config_value(self, key, value):
        # Convenience mutator for a single driver option.
        self.config[key] = value

    def unset_config(self, key):
        # Remove a driver option if present; silently no-op otherwise.
        if key in self.config:
            del self.config[key]
kwargs.get('Hard')) + if not isinstance(name, six.string_types): + raise ValueError("Ulimit.name must be a string") + if soft and not isinstance(soft, int): + raise ValueError("Ulimit.soft must be an integer") + if hard and not isinstance(hard, int): + raise ValueError("Ulimit.hard must be an integer") + super(Ulimit, self).__init__({ + 'Name': name, + 'Soft': soft, + 'Hard': hard + }) + + @property + def name(self): + return self['Name'] + + @name.setter + def name(self, value): + self['Name'] = value + + @property + def soft(self): + return self.get('Soft') + + @soft.setter + def soft(self, value): + self['Soft'] = value + + @property + def hard(self): + return self.get('Hard') + + @hard.setter + def hard(self, value): + self['Hard'] = value diff --git a/testbed/docker__docker-py/docker/utils/utils.py b/testbed/docker__docker-py/docker/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d4393d584d4b64fd53a7ea1a7ac9decee55f7766 --- /dev/null +++ b/testbed/docker__docker-py/docker/utils/utils.py @@ -0,0 +1,962 @@ +# Copyright 2013 dotCloud inc. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import io +import os +import os.path +import json +import shlex +import tarfile +import tempfile +import warnings +from distutils.version import StrictVersion +from fnmatch import fnmatch +from datetime import datetime + +import requests +import six + +from .. import constants +from .. import errors +from .. 
def create_ipam_pool(subnet=None, iprange=None, gateway=None,
                     aux_addresses=None):
    """Build one IPAM pool config dict for the networking API.

    Every value defaults to None, which the daemon treats as unset.
    """
    pool = dict.fromkeys(
        ('Subnet', 'IPRange', 'Gateway', 'AuxiliaryAddresses'))
    pool['Subnet'] = subnet
    pool['IPRange'] = iprange
    pool['Gateway'] = gateway
    pool['AuxiliaryAddresses'] = aux_addresses
    return pool
def should_include(path, exclude_patterns, include_patterns):
    """
    Given a path, a list of exclude patterns, and a list of inclusion patterns:

    1. Returns True if the path doesn't match any exclusion pattern
    2. Returns False if the path matches an exclusion pattern and doesn't match
       an inclusion pattern
    3. Returns true if the path matches an exclusion pattern and matches an
       inclusion pattern
    """
    # Fix: the inner loop previously reused the name `pattern`,
    # shadowing the outer loop variable; distinct names make the
    # two-level match explicit. Behavior is unchanged.
    for exclude_pattern in exclude_patterns:
        if match_path(path, exclude_pattern):
            # Excluded — keep it only if a '!' exception rule
            # (inclusion pattern) re-includes it.
            for include_pattern in include_patterns:
                if match_path(path, include_pattern):
                    return True
            return False
    return True
def match_path(path, pattern):
    """Return True if *path* matches *pattern* under .dockerignore rules.

    Only as many leading path components as the pattern has are
    compared; a trailing '/' on the pattern is ignored.
    """
    pattern = pattern.rstrip('/')
    depth = len(pattern.split('/'))
    prefix = '/'.join(path.split('/')[:depth])
    return fnmatch(prefix, pattern)
status 401 + return ( + res.status_code < 400 or + (valid_4xx_statuses and res.status_code in valid_4xx_statuses) + ) + + +def _convert_port_binding(binding): + result = {'HostIp': '', 'HostPort': ''} + if isinstance(binding, tuple): + if len(binding) == 2: + result['HostPort'] = binding[1] + result['HostIp'] = binding[0] + elif isinstance(binding[0], six.string_types): + result['HostIp'] = binding[0] + else: + result['HostPort'] = binding[0] + elif isinstance(binding, dict): + if 'HostPort' in binding: + result['HostPort'] = binding['HostPort'] + if 'HostIp' in binding: + result['HostIp'] = binding['HostIp'] + else: + raise ValueError(binding) + else: + result['HostPort'] = binding + + if result['HostPort'] is None: + result['HostPort'] = '' + else: + result['HostPort'] = str(result['HostPort']) + + return result + + +def convert_port_bindings(port_bindings): + result = {} + for k, v in six.iteritems(port_bindings): + key = str(k) + if '/' not in key: + key += '/tcp' + if isinstance(v, list): + result[key] = [_convert_port_binding(binding) for binding in v] + else: + result[key] = [_convert_port_binding(v)] + return result + + +def convert_volume_binds(binds): + if isinstance(binds, list): + return binds + + result = [] + for k, v in binds.items(): + if isinstance(k, six.binary_type): + k = k.decode('utf-8') + + if isinstance(v, dict): + if 'ro' in v and 'mode' in v: + raise ValueError( + 'Binding cannot contain both "ro" and "mode": {}' + .format(repr(v)) + ) + + bind = v['bind'] + if isinstance(bind, six.binary_type): + bind = bind.decode('utf-8') + + if 'ro' in v: + mode = 'ro' if v['ro'] else 'rw' + elif 'mode' in v: + mode = v['mode'] + else: + mode = 'rw' + + result.append( + six.text_type('{0}:{1}:{2}').format(k, bind, mode) + ) + else: + if isinstance(v, six.binary_type): + v = v.decode('utf-8') + result.append( + six.text_type('{0}:{1}:rw').format(k, v) + ) + return result + + +def parse_repository_tag(repo_name): + parts = repo_name.rsplit('@', 1) + if 
len(parts) == 2: + return tuple(parts) + parts = repo_name.rsplit(':', 1) + if len(parts) == 2 and '/' not in parts[1]: + return tuple(parts) + return repo_name, None + + +# Based on utils.go:ParseHost http://tinyurl.com/nkahcfh +# fd:// protocol unsupported (for obvious reasons) +# Added support for http and https +# Protocol translation: tcp -> http, unix -> http+unix +def parse_host(addr, platform=None, tls=False): + proto = "http+unix" + host = DEFAULT_HTTP_HOST + port = None + path = '' + + if not addr and platform == 'win32': + addr = '{0}:{1}'.format(DEFAULT_HTTP_HOST, 2375) + + if not addr or addr.strip() == 'unix://': + return DEFAULT_UNIX_SOCKET + + addr = addr.strip() + if addr.startswith('http://'): + addr = addr.replace('http://', 'tcp://') + if addr.startswith('http+unix://'): + addr = addr.replace('http+unix://', 'unix://') + + if addr == 'tcp://': + raise errors.DockerException( + "Invalid bind address format: {0}".format(addr)) + elif addr.startswith('unix://'): + addr = addr[7:] + elif addr.startswith('tcp://'): + proto = "http" + addr = addr[6:] + elif addr.startswith('https://'): + proto = "https" + addr = addr[8:] + elif addr.startswith('fd://'): + raise errors.DockerException("fd protocol is not implemented") + else: + if "://" in addr: + raise errors.DockerException( + "Invalid bind address protocol: {0}".format(addr) + ) + proto = "https" if tls else "http" + + if proto != "http+unix" and ":" in addr: + host_parts = addr.split(':') + if len(host_parts) != 2: + raise errors.DockerException( + "Invalid bind address format: {0}".format(addr) + ) + if host_parts[0]: + host = host_parts[0] + + port = host_parts[1] + if '/' in port: + port, path = port.split('/', 1) + path = '/{0}'.format(path) + try: + port = int(port) + except Exception: + raise errors.DockerException( + "Invalid port: {0}".format(addr) + ) + + elif proto in ("http", "https") and ':' not in addr: + raise errors.DockerException( + "Bind address needs a port: {0}".format(addr)) + 
def parse_devices(devices):
    """Normalize a list of device specs into remote-API dicts.

    Each entry may already be a dict (passed through unchanged) or a
    ``host[:container[:permissions]]`` string; the container path
    defaults to the host path and permissions default to 'rwm'.

    Raises DockerException for entries of any other type.
    """
    device_list = []
    for device in devices:
        if isinstance(device, dict):
            # Already in API shape — pass through as-is.
            device_list.append(device)
            continue
        if not isinstance(device, six.string_types):
            raise errors.DockerException(
                'Invalid device type {0}'.format(type(device))
            )
        parts = device.split(':')
        if parts:
            path_on_host = parts[0]
            path_in_container = parts[1] if len(parts) > 1 else path_on_host
            permissions = parts[2] if len(parts) > 2 else 'rwm'
            device_list.append({
                'PathOnHost': path_on_host,
                'PathInContainer': path_in_container,
                'CgroupPermissions': permissions
            })
    return device_list
def datetime_to_timestamp(dt):
    """Convert a UTC datetime to a Unix timestamp in whole seconds.

    Sub-second precision is deliberately discarded (timedelta's
    microseconds component is ignored).
    """
    epoch = datetime.utcfromtimestamp(0)
    elapsed = dt - epoch
    # timedelta stores days and seconds separately; combine them.
    return elapsed.days * 24 * 3600 + elapsed.seconds
def host_config_type_error(param, param_value, expected):
    """Build (not raise) the TypeError for a host_config parameter of
    the wrong type; the caller is responsible for raising it.
    """
    msg = 'Invalid type for {0} param: expected {1} but found {2}'.format(
        param, expected, type(param_value))
    return TypeError(msg)
+ ) + version = constants.DEFAULT_DOCKER_API_VERSION + + if mem_limit is not None: + host_config['Memory'] = parse_bytes(mem_limit) + + if memswap_limit is not None: + host_config['MemorySwap'] = parse_bytes(memswap_limit) + + if mem_swappiness is not None: + if version_lt(version, '1.20'): + raise host_config_version_error('mem_swappiness', '1.20') + if not isinstance(mem_swappiness, int): + raise host_config_type_error( + 'mem_swappiness', mem_swappiness, 'int' + ) + + host_config['MemorySwappiness'] = mem_swappiness + + if shm_size is not None: + if isinstance(shm_size, six.string_types): + shm_size = parse_bytes(shm_size) + + host_config['ShmSize'] = shm_size + + if pid_mode not in (None, 'host'): + raise host_config_value_error('pid_mode', pid_mode) + elif pid_mode: + host_config['PidMode'] = pid_mode + + if ipc_mode: + host_config['IpcMode'] = ipc_mode + + if privileged: + host_config['Privileged'] = privileged + + if oom_kill_disable: + if version_lt(version, '1.20'): + raise host_config_version_error('oom_kill_disable', '1.19') + + host_config['OomKillDisable'] = oom_kill_disable + + if publish_all_ports: + host_config['PublishAllPorts'] = publish_all_ports + + if read_only is not None: + host_config['ReadonlyRootfs'] = read_only + + if dns_search: + host_config['DnsSearch'] = dns_search + + if network_mode: + host_config['NetworkMode'] = network_mode + elif network_mode is None and compare_version('1.19', version) > 0: + host_config['NetworkMode'] = 'default' + + if restart_policy: + if not isinstance(restart_policy, dict): + raise host_config_type_error( + 'restart_policy', restart_policy, 'dict' + ) + + host_config['RestartPolicy'] = restart_policy + + if cap_add: + host_config['CapAdd'] = cap_add + + if cap_drop: + host_config['CapDrop'] = cap_drop + + if devices: + host_config['Devices'] = parse_devices(devices) + + if group_add: + if version_lt(version, '1.20'): + raise host_config_version_error('group_add', '1.20') + + host_config['GroupAdd'] = 
[six.text_type(grp) for grp in group_add] + + if dns is not None: + host_config['Dns'] = dns + + if security_opt is not None: + if not isinstance(security_opt, list): + raise host_config_type_error('security_opt', security_opt, 'list') + + host_config['SecurityOpt'] = security_opt + + if volumes_from is not None: + if isinstance(volumes_from, six.string_types): + volumes_from = volumes_from.split(',') + + host_config['VolumesFrom'] = volumes_from + + if binds is not None: + host_config['Binds'] = convert_volume_binds(binds) + + if port_bindings is not None: + host_config['PortBindings'] = convert_port_bindings(port_bindings) + + if extra_hosts is not None: + if isinstance(extra_hosts, dict): + extra_hosts = [ + '{0}:{1}'.format(k, v) + for k, v in sorted(six.iteritems(extra_hosts)) + ] + + host_config['ExtraHosts'] = extra_hosts + + if links is not None: + host_config['Links'] = normalize_links(links) + + if isinstance(lxc_conf, dict): + formatted = [] + for k, v in six.iteritems(lxc_conf): + formatted.append({'Key': k, 'Value': str(v)}) + lxc_conf = formatted + + if lxc_conf is not None: + host_config['LxcConf'] = lxc_conf + + if cgroup_parent is not None: + host_config['CgroupParent'] = cgroup_parent + + if ulimits is not None: + if not isinstance(ulimits, list): + raise host_config_type_error('ulimits', ulimits, 'list') + host_config['Ulimits'] = [] + for l in ulimits: + if not isinstance(l, Ulimit): + l = Ulimit(**l) + host_config['Ulimits'].append(l) + + if log_config is not None: + if not isinstance(log_config, LogConfig): + if not isinstance(log_config, dict): + raise host_config_type_error( + 'log_config', log_config, 'LogConfig' + ) + log_config = LogConfig(**log_config) + + host_config['LogConfig'] = log_config + + if cpu_quota: + if not isinstance(cpu_quota, int): + raise host_config_type_error('cpu_quota', cpu_quota, 'int') + if version_lt(version, '1.19'): + raise host_config_version_error('cpu_quota', '1.19') + + host_config['CpuQuota'] = cpu_quota + 
+ if cpu_period: + if not isinstance(cpu_period, int): + raise host_config_type_error('cpu_period', cpu_period, 'int') + if version_lt(version, '1.19'): + raise host_config_version_error('cpu_period', '1.19') + + host_config['CpuPeriod'] = cpu_period + + return host_config + + +def normalize_links(links): + if isinstance(links, dict): + links = six.iteritems(links) + + return ['{0}:{1}'.format(k, v) for k, v in sorted(links)] + + +def create_networking_config(endpoints_config=None): + networking_config = {} + + if endpoints_config: + networking_config["EndpointsConfig"] = endpoints_config + + return networking_config + + +def create_endpoint_config(version, aliases=None, links=None): + endpoint_config = {} + + if aliases: + if version_lt(version, '1.22'): + raise host_config_version_error('endpoint_config.aliases', '1.22') + endpoint_config["Aliases"] = aliases + + if links: + if version_lt(version, '1.22'): + raise host_config_version_error('endpoint_config.links', '1.22') + endpoint_config["Links"] = normalize_links(links) + + return endpoint_config + + +def parse_env_file(env_file): + """ + Reads a line-separated environment file. + The format of each line should be "key=value". 
+ """ + environment = {} + + with open(env_file, 'r') as f: + for line in f: + + if line[0] == '#': + continue + + parse_line = line.strip().split('=') + if len(parse_line) == 2: + k, v = parse_line + environment[k] = v + else: + raise errors.DockerException( + 'Invalid line in environment file {0}:\n{1}'.format( + env_file, line)) + + return environment + + +def split_command(command): + if six.PY2 and not isinstance(command, six.binary_type): + command = command.encode('utf-8') + return shlex.split(command) + + +def format_environment(environment): + def format_env(key, value): + if value is None: + return key + return '{key}={value}'.format(key=key, value=value) + return [format_env(*var) for var in six.iteritems(environment)] + + +def create_container_config( + version, image, command, hostname=None, user=None, detach=False, + stdin_open=False, tty=False, mem_limit=None, ports=None, environment=None, + dns=None, volumes=None, volumes_from=None, network_disabled=False, + entrypoint=None, cpu_shares=None, working_dir=None, domainname=None, + memswap_limit=None, cpuset=None, host_config=None, mac_address=None, + labels=None, volume_driver=None, stop_signal=None, networking_config=None, +): + if isinstance(command, six.string_types): + command = split_command(command) + + if isinstance(entrypoint, six.string_types): + entrypoint = split_command(entrypoint) + + if isinstance(environment, dict): + environment = format_environment(environment) + + if labels is not None and compare_version('1.18', version) < 0: + raise errors.InvalidVersion( + 'labels were only introduced in API version 1.18' + ) + + if stop_signal is not None and compare_version('1.21', version) < 0: + raise errors.InvalidVersion( + 'stop_signal was only introduced in API version 1.21' + ) + + if compare_version('1.19', version) < 0: + if volume_driver is not None: + raise errors.InvalidVersion( + 'Volume drivers were only introduced in API version 1.19' + ) + mem_limit = mem_limit if mem_limit is not 
None else 0 + memswap_limit = memswap_limit if memswap_limit is not None else 0 + else: + if mem_limit is not None: + raise errors.InvalidVersion( + 'mem_limit has been moved to host_config in API version 1.19' + ) + + if memswap_limit is not None: + raise errors.InvalidVersion( + 'memswap_limit has been moved to host_config in API ' + 'version 1.19' + ) + + if isinstance(labels, list): + labels = dict((lbl, six.text_type('')) for lbl in labels) + + if mem_limit is not None: + mem_limit = parse_bytes(mem_limit) + if memswap_limit is not None: + memswap_limit = parse_bytes(memswap_limit) + + if isinstance(ports, list): + exposed_ports = {} + for port_definition in ports: + port = port_definition + proto = 'tcp' + if isinstance(port_definition, tuple): + if len(port_definition) == 2: + proto = port_definition[1] + port = port_definition[0] + exposed_ports['{0}/{1}'.format(port, proto)] = {} + ports = exposed_ports + + if isinstance(volumes, six.string_types): + volumes = [volumes, ] + + if isinstance(volumes, list): + volumes_dict = {} + for vol in volumes: + volumes_dict[vol] = {} + volumes = volumes_dict + + if volumes_from: + if not isinstance(volumes_from, six.string_types): + volumes_from = ','.join(volumes_from) + else: + # Force None, an empty list or dict causes client.start to fail + volumes_from = None + + attach_stdin = False + attach_stdout = False + attach_stderr = False + stdin_once = False + + if not detach: + attach_stdout = True + attach_stderr = True + + if stdin_open: + attach_stdin = True + stdin_once = True + + if compare_version('1.10', version) >= 0: + message = ('{0!r} parameter has no effect on create_container().' 
+ ' It has been moved to host_config') + if dns is not None: + raise errors.InvalidVersion(message.format('dns')) + if volumes_from is not None: + raise errors.InvalidVersion(message.format('volumes_from')) + + return { + 'Hostname': hostname, + 'Domainname': domainname, + 'ExposedPorts': ports, + 'User': six.text_type(user) if user else None, + 'Tty': tty, + 'OpenStdin': stdin_open, + 'StdinOnce': stdin_once, + 'Memory': mem_limit, + 'AttachStdin': attach_stdin, + 'AttachStdout': attach_stdout, + 'AttachStderr': attach_stderr, + 'Env': environment, + 'Cmd': command, + 'Dns': dns, + 'Image': image, + 'Volumes': volumes, + 'VolumesFrom': volumes_from, + 'NetworkDisabled': network_disabled, + 'Entrypoint': entrypoint, + 'CpuShares': cpu_shares, + 'Cpuset': cpuset, + 'CpusetCpus': cpuset, + 'WorkingDir': working_dir, + 'MemorySwap': memswap_limit, + 'HostConfig': host_config, + 'NetworkingConfig': networking_config, + 'MacAddress': mac_address, + 'Labels': labels, + 'VolumeDriver': volume_driver, + 'StopSignal': stop_signal + } diff --git a/testbed/docker__docker-py/docker/version.py b/testbed/docker__docker-py/docker/version.py new file mode 100644 index 0000000000000000000000000000000000000000..4efc6ebe08bf2ba24585f1f850e6b7519fdeb2a3 --- /dev/null +++ b/testbed/docker__docker-py/docker/version.py @@ -0,0 +1,2 @@ +version = "1.8.0-dev" +version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/testbed/docker__docker-py/docs-requirements.txt b/testbed/docker__docker-py/docs-requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..abc8d72db6b7dfb1fccc270244a4ef27bb582a4a --- /dev/null +++ b/testbed/docker__docker-py/docs-requirements.txt @@ -0,0 +1 @@ +mkdocs==0.9 diff --git a/testbed/docker__docker-py/docs/api.md b/testbed/docker__docker-py/docs/api.md new file mode 100644 index 0000000000000000000000000000000000000000..3393e68ef1914e9b407598ba4aa12f4b78462db3 --- /dev/null +++ 
b/testbed/docker__docker-py/docs/api.md @@ -0,0 +1,1089 @@ +# Client API + +To instantiate a `Client` class that will allow you to communicate with a +Docker daemon, simply do: + +```python +>>> from docker import Client +>>> cli = Client(base_url='unix://var/run/docker.sock') +``` + +**Params**: + +* base_url (str): Refers to the protocol+hostname+port where the Docker server +is hosted. +* version (str): The version of the API the client will use. Specify `'auto'` + to use the API version provided by the server. +* timeout (int): The HTTP request timeout, in seconds. +* tls (bool or [TLSConfig](tls.md#TLSConfig)): Equivalent CLI options: `docker --tls ...` + +**** + +## attach + +The `.logs()` function is a wrapper around this method, which you can use +instead if you want to fetch/stream container output without first retrieving +the entire backlog. + +**Params**: + +* container (str): The container to attach to +* stdout (bool): Get STDOUT +* stderr (bool): Get STDERR +* stream (bool): Return an iterator +* logs (bool): Get all previous output + +**Returns** (generator or str): The logs or output for the image + +## build + +Similar to the `docker build` command. Either `path` or `fileobj` needs to be +set. `path` can be a local path (to a directory containing a Dockerfile) or a +remote URL. `fileobj` must be a readable file-like object to a Dockerfile. + +If you have a tar file for the Docker build context (including a Dockerfile) +already, pass a readable file-like object to `fileobj` and also pass +`custom_context=True`. If the stream is compressed also, set `encoding` to the +correct value (e.g `gzip`). + +**Params**: + +* path (str): Path to the directory containing the Dockerfile +* tag (str): A tag to add to the final image +* quiet (bool): Whether to return the status +* fileobj: A file object to use as the Dockerfile. (Or a file-like object) +* nocache (bool): Don't use the cache when set to `True` +* rm (bool): Remove intermediate containers. 
The `docker build` command now + defaults to ``--rm=true``, but we have kept the old default of `False` + to preserve backward compatibility +* stream (bool): *Deprecated for API version > 1.8 (always True)*. + Return a blocking generator you can iterate over to retrieve build output as + it happens +* timeout (int): HTTP timeout +* custom_context (bool): Optional if using `fileobj` +* encoding (str): The encoding for a stream. Set to `gzip` for compressing +* pull (bool): Downloads any updates to the FROM image in Dockerfiles +* forcerm (bool): Always remove intermediate containers, even after unsuccessful builds +* dockerfile (str): path within the build context to the Dockerfile +* container_limits (dict): A dictionary of limits applied to each container + created by the build process. Valid keys: + - memory (int): set memory limit for build + - memswap (int): Total memory (memory + swap), -1 to disable swap + - cpushares (int): CPU shares (relative weight) + - cpusetcpus (str): CPUs in which to allow execution, e.g., `"0-3"`, `"0,1"` +* decode (bool): If set to `True`, the returned stream will be decoded into + dicts on the fly. Default `False`. + +**Returns** (generator): A generator for the build output + +```python +>>> from io import BytesIO +>>> from docker import Client +>>> dockerfile = ''' +... # Shared Volume +... FROM busybox:buildroot-2014.02 +... MAINTAINER first last, first.last@yourdomain.com +... VOLUME /data +... CMD ["/bin/sh"] +... ''' +>>> f = BytesIO(dockerfile.encode('utf-8')) +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> response = [line for line in cli.build( +... fileobj=f, rm=True, tag='yourname/volume' +... 
)] +>>> response +['{"stream":" ---\\u003e a9eb17255234\\n"}', +'{"stream":"Step 1 : MAINTAINER first last, first.last@yourdomain.com\\n"}', +'{"stream":" ---\\u003e Running in 08787d0ee8b1\\n"}', +'{"stream":" ---\\u003e 23e5e66a4494\\n"}', +'{"stream":"Removing intermediate container 08787d0ee8b1\\n"}', +'{"stream":"Step 2 : VOLUME /data\\n"}', +'{"stream":" ---\\u003e Running in abdc1e6896c6\\n"}', +'{"stream":" ---\\u003e 713bca62012e\\n"}', +'{"stream":"Removing intermediate container abdc1e6896c6\\n"}', +'{"stream":"Step 3 : CMD [\\"/bin/sh\\"]\\n"}', +'{"stream":" ---\\u003e Running in dba30f2a1a7e\\n"}', +'{"stream":" ---\\u003e 032b8b2855fc\\n"}', +'{"stream":"Removing intermediate container dba30f2a1a7e\\n"}', +'{"stream":"Successfully built 032b8b2855fc\\n"}'] +``` + +**Raises:** [TypeError]( +https://docs.python.org/3.4/library/exceptions.html#TypeError) if `path` nor +`fileobj` are specified + +## commit + +Identical to the `docker commit` command. + +**Params**: + +* container (str): The image hash of the container +* repository (str): The repository to push the image to +* tag (str): The tag to push +* message (str): A commit message +* author (str): The name of the author +* conf (dict): The configuration for the container. See the [Docker remote api]( +https://docs.docker.com/reference/api/docker_remote_api/) for full details. + +## containers + +List containers. Identical to the `docker ps` command. + +**Params**: + +* quiet (bool): Only display numeric Ids +* all (bool): Show all containers. Only running containers are shown by default +* trunc (bool): Truncate output +* latest (bool): Show only the latest created container, include non-running +ones. 
+* since (str): Show only containers created since Id or Name, include +non-running ones +* before (str): Show only container created before Id or Name, include +non-running ones +* limit (int): Show `limit` last created containers, include non-running ones +* size (bool): Display sizes +* filters (dict): Filters to be processed on the image list. Available filters: + - `exited` (int): Only containers with specified exit code + - `status` (str): One of `restarting`, `running`, `paused`, `exited` + - `label` (str): format either `"key"` or `"key=value"` + +**Returns** (dict): The system's containers + +```python +>>> from docker import Client +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> cli.containers() +[{'Command': '/bin/sleep 30', + 'Created': 1412574844, + 'Id': '6e276c9e6e5759e12a6a9214efec6439f80b4f37618e1a6547f28a3da34db07a', + 'Image': 'busybox:buildroot-2014.02', + 'Names': ['/grave_mayer'], + 'Ports': [], + 'Status': 'Up 1 seconds'}] +``` + +## connect_container_to_network + +Connect a container to a network. + +**Params**: + +* container (str): container-id/name to be connected to the network +* net_id (str): network id + +## copy +Identical to the `docker cp` command. Get files/folders from the container. +**Deprecated for API version >= 1.20** – Consider using +[`get_archive`](#get_archive) **instead.** + +**Params**: + +* container (str): The container to copy from +* resource (str): The path within the container + +**Returns** (str): The contents of the file as a string + +## create_container + +Creates a container that can then be `.start()` ed. Parameters are similar to +those for the `docker run` command except it doesn't support the attach +options (`-a`). + +See [Port bindings](port-bindings.md) and [Using volumes](volumes.md) for more +information on how to create port bindings and volume mappings. 
+ +The `mem_limit` variable accepts float values (which represent the memory limit +of the created container in bytes) or a string with a units identification char +('100000b', '1000k', '128m', '1g'). If a string is specified without a units +character, bytes are assumed as an intended unit. + +`volumes_from` and `dns` arguments raise [TypeError]( +https://docs.python.org/3.4/library/exceptions.html#TypeError) exception if +they are used against v1.10 and above of the Docker remote API. Those +arguments should be passed as part of the `host_config` dictionary. + +**Params**: + +* image (str): The image to run +* command (str or list): The command to be run in the container +* hostname (str): Optional hostname for the container +* user (str or int): Username or UID +* detach (bool): Detached mode: run container in the background and print new +container Id +* stdin_open (bool): Keep STDIN open even if not attached +* tty (bool): Allocate a pseudo-TTY +* mem_limit (float or str): Memory limit (format: [number][optional unit], +where unit = b, k, m, or g) +* ports (list of ints): A list of port numbers +* environment (dict or list): A dictionary or a list of strings in the +following format `["PASSWORD=xxx"]` or `{"PASSWORD": "xxx"}`. +* dns (list): DNS name servers +* volumes (str or list): +* volumes_from (str or list): List of container names or Ids to get volumes +from. Optionally a single string joining container id's with commas +* network_disabled (bool): Disable networking +* name (str): A name for the container +* entrypoint (str or list): An entrypoint +* cpu_shares (int): CPU shares (relative weight) +* working_dir (str): Path to the working directory +* domainname (str or list): Set custom DNS search domains +* memswap_limit (int): +* host_config (dict): A [HostConfig](hostconfig.md) dictionary +* mac_address (str): The Mac Address to assign the container +* labels (dict or list): A dictionary of name-value labels (e.g. 
`{"label1": "value1", "label2": "value2"}`) or a list of names of labels to set with empty values (e.g. `["label1", "label2"]`) +* volume_driver (str): The name of a volume driver/plugin. +* stop_signal (str): The stop signal to use to stop the container (e.g. `SIGINT`). + +**Returns** (dict): A dictionary with an image 'Id' key and a 'Warnings' key. + +```python +>>> from docker import Client +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> container = cli.create_container(image='busybox:latest', command='/bin/sleep 30') +>>> print(container) +{'Id': '8a61192da2b3bb2d922875585e29b74ec0dc4e0117fcbf84c962204e97564cd7', + 'Warnings': None} +``` + +### docker.utils.parse_env_file + +A utility for parsing an environment file. + +The expected format of the file is as follows: + +``` +USERNAME=jdoe +PASSWORD=secret +``` + +The utility can be used as follows: + +```python +>>> import docker.utils +>>> my_envs = docker.utils.parse_env_file('/path/to/file') +>>> docker.utils.create_container_config('1.18', '_mongodb', 'foobar', environment=my_envs) +``` + +You can now use this with 'environment' for `create_container`. + + +## create_network + +Create a network, similar to the `docker network create` command. 
+ +**Params**: + +* name (str): Name of the network +* driver (str): Name of the driver used to create the network + +* options (dict): Driver options as a key-value dictionary + +**Returns** (dict): The created network reference object + +## create_volume + +Create and register a named volume + +**Params**: + +* name (str): Name of the volume +* driver (str): Name of the driver used to create the volume +* driver_opts (dict): Driver options as a key-value dictionary + +**Returns** (dict): The created volume reference object + +```python +>>> from docker import Client +>>> cli = Client() +>>> volume = cli.create_volume( + name='foobar', driver='local', driver_opts={'foo': 'bar', 'baz': 'false'} +) +>>> print(volume) +{u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', u'Driver': u'local', u'Name': u'foobar'} +``` + +## diff + +Inspect changes on a container's filesystem. + +**Params**: + +* container (str): The container to diff + +**Returns** (str): + +## disconnect_container_from_network + +**Params**: + +* container (str): container-id/name to be disconnected from a network +* net_id (str): network id + +## events + +Identical to the `docker events` command: get real time events from the server. The `events` +function return a blocking generator you can iterate over to retrieve events as they happen. + +**Params**: + +* since (UTC datetime or int): get events from this point +* until (UTC datetime or int): get events until this point +* filters (dict): filter the events by event time, container or image +* decode (bool): If set to true, stream will be decoded into dicts on the + fly. False by default. + +**Returns** (generator): + +```python +{u'status': u'start', + u'from': u'image/with:tag', + u'id': u'container-id', + u'time': 1423339459} +``` + +## execute + +This command is deprecated for docker-py >= 1.2.0 ; use `exec_create` and +`exec_start` instead. + +## exec_create + +Sets up an exec instance in a running container. 
+ +**Params**: + +* container (str): Target container where exec instance will be created +* cmd (str or list): Command to be executed +* stdout (bool): Attach to stdout of the exec command if true. Default: True +* stderr (bool): Attach to stderr of the exec command if true. Default: True +* since (UTC datetime or int): Output logs from this timestamp. Default: `None` (all logs are given) +* tty (bool): Allocate a pseudo-TTY. Default: False +* user (str): User to execute command as. Default: root + +**Returns** (dict): A dictionary with an exec 'Id' key. + + +## exec_inspect + +Return low-level information about an exec command. + +**Params**: + +* exec_id (str): ID of the exec instance + +**Returns** (dict): Dictionary of values returned by the endpoint. + + +## exec_resize + +Resize the tty session used by the specified exec command. + +**Params**: + +* exec_id (str): ID of the exec instance +* height (int): Height of tty session +* width (int): Width of tty session + +## exec_start + +Start a previously set up exec instance. + +**Params**: + +* exec_id (str): ID of the exec instance +* detach (bool): If true, detach from the exec command. Default: False +* tty (bool): Allocate a pseudo-TTY. Default: False +* stream (bool): Stream response data. Default: False + +**Returns** (generator or str): If `stream=True`, a generator yielding response +chunks. A string containing response data otherwise. + +## export + +Export the contents of a filesystem as a tar archive to STDOUT. + +**Params**: + +* container (str): The container to export + +**Returns** (str): The filesystem tar archive as a str + +## get_archive + +Retrieve a file or folder from a container in the form of a tar archive. + +**Params**: + +* container (str): The container where the file is located +* path (str): Path to the file or folder to retrieve + +**Returns** (tuple): First element is a raw tar data stream. Second element is +a dict containing `stat` information on the specified `path`. 
+ +```python +>>> import docker +>>> cli = docker.Client() +>>> ctnr = cli.create_container('busybox', 'true') +>>> strm, stat = cli.get_archive(ctnr, '/bin/sh') +>>> print(stat) +{u'linkTarget': u'', u'mode': 493, u'mtime': u'2015-09-16T12:34:23-07:00', u'name': u'sh', u'size': 962860} +``` + +## get_image + +Get an image from the docker daemon. Similar to the `docker save` command. + +**Params**: + +* image (str): Image name to get + +**Returns** (urllib3.response.HTTPResponse object): The response from the docker daemon + +An example of how to get (save) an image to a file. +```python +>>> from docker import Client +>>> cli = Client(base_url='unix://var/run/docker.sock') +>>> image = cli.get_image('fedora:latest') +>>> image_tar = open('/tmp/fedora-latest.tar', 'wb') +>>> image_tar.write(image.data) +>>> image_tar.close() +``` + +## history + +Show the history of an image. + +**Params**: + +* image (str): The image to show history for + +**Returns** (str): The history of the image + +## images + +List images. Identical to the `docker images` command. + +**Params**: + +* name (str): Only show images belonging to the repository `name` +* quiet (bool): Only show numeric Ids. Returns a list +* all (bool): Show all images (by default filter out the intermediate image +layers) +* filters (dict): Filters to be processed on the image list. Available filters: + - `dangling` (bool) + - `label` (str): format either `"key"` or `"key=value"` + +**Returns** (dict or list): A list if `quiet=True`, otherwise a dict. + +```python +[{'Created': 1401926735, +'Id': 'a9eb172552348a9a49180694790b33a1097f546456d041b6e82e4d7716ddb721', +'ParentId': '120e218dd395ec314e7b6249f39d2853911b3d6def6ea164ae05722649f34b16', +'RepoTags': ['busybox:buildroot-2014.02', 'busybox:latest'], +'Size': 0, +'VirtualSize': 2433303}, +... +``` + +## import_image + +Similar to the `docker import` command.
+ +If `src` is a string or unicode string, it will first be treated as a path to +a tarball on the local system. If there is an error reading from that file, +src will be treated as a URL instead to fetch the image from. You can also pass +an open file handle as 'src', in which case the data will be read from that +file. + +If `src` is unset but `image` is set, the `image` parameter will be taken as +the name of an existing image to import from. + +**Params**: + +* src (str or file): Path to tarfile, URL, or file-like object +* repository (str): The repository to create +* tag (str): The tag to apply +* image (str): Use another image like the `FROM` Dockerfile parameter + +## import_image_from_data + +Like `.import_image()`, but allows importing in-memory bytes data. + +**Params**: + +* data (bytes collection): Bytes collection containing valid tar data +* repository (str): The repository to create +* tag (str): The tag to apply + +## import_image_from_file + +Like `.import_image()`, but only supports importing from a tar file on +disk. If the file doesn't exist it will raise `IOError`. + +**Params**: + +* filename (str): Full path to a tar file. +* repository (str): The repository to create +* tag (str): The tag to apply + +## import_image_from_url + +Like `.import_image()`, but only supports importing from a URL. + +**Params**: + +* url (str): A URL pointing to a tar file. +* repository (str): The repository to create +* tag (str): The tag to apply + +## import_image_from_image + +Like `.import_image()`, but only supports importing from another image, +like the `FROM` Dockerfile parameter. + +**Params**: + +* image (str): Image name to import from +* repository (str): The repository to create +* tag (str): The tag to apply + +## info + +Display system-wide information. Identical to the `docker info` command. 
+ +**Returns** (dict): The info as a dict + +``` +>>> from docker import Client +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> cli.info() +{'Containers': 3, + 'Debug': 1, + 'Driver': 'aufs', + 'DriverStatus': [['Root Dir', '/mnt/sda1/var/lib/docker/aufs'], + ['Dirs', '225']], + 'ExecutionDriver': 'native-0.2', + 'IPv4Forwarding': 1, + 'Images': 219, + 'IndexServerAddress': 'https://index.docker.io/v1/', + 'InitPath': '/usr/local/bin/docker', + 'InitSha1': '', + 'KernelVersion': '3.16.1-tinycore64', + 'MemoryLimit': 1, + 'NEventsListener': 0, + 'NFd': 11, + 'NGoroutines': 12, + 'OperatingSystem': 'Boot2Docker 1.2.0 (TCL 5.3);', + 'SwapLimit': 1} +``` + +## insert +*DEPRECATED* + +## inspect_container + +Identical to the `docker inspect` command, but only for containers. + +**Params**: + +* container (str): The container to inspect + +**Returns** (dict): Nearly the same output as `docker inspect`, just as a +single dict + +## inspect_image + +Identical to the `docker inspect` command, but only for images. + +**Params**: + +* image_id (str): The image to inspect + +**Returns** (dict): Nearly the same output as `docker inspect`, just as a +single dict + +## inspect_network + +Retrieve network info by id. + +**Params**: + +* net_id (str): network id + +**Returns** (dict): Network information dictionary + +## inspect_volume + +Retrieve volume info by name. + +**Params**: + +* name (str): volume name + +**Returns** (dict): Volume information dictionary + +```python +>>> cli.inspect_volume('foobar') +{u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', u'Driver': u'local', u'Name': u'foobar'} +``` + +## kill + +Kill a container or send a signal to a container. + +**Params**: + +* container (str): The container to kill +* signal (str or int): The signal to send. Defaults to `SIGKILL` + +## load_image + +Load an image that was previously saved using `Client.get_image` +(or `docker save`). Similar to `docker load`. 
+ +**Params**: + +* data (binary): Image data to be loaded + +## login + +Nearly identical to the `docker login` command, but non-interactive. + +**Params**: + +* username (str): The registry username +* password (str): The plaintext password +* email (str): The email for the registry account +* registry (str): URL to the registry. Ex:`https://index.docker.io/v1/` +* reauth (bool): Whether refresh existing authentication on the docker server. +* dockercfg_path (str): Use a custom path for the .dockercfg file + (default `$HOME/.dockercfg`) + +**Returns** (dict): The response from the login request + +## logs + +Identical to the `docker logs` command. The `stream` parameter makes the `logs` +function return a blocking generator you can iterate over to retrieve log +output as it happens. + +**Params**: + +* container (str): The container to get logs from +* stdout (bool): Get STDOUT +* stderr (bool): Get STDERR +* stream (bool): Stream the response +* timestamps (bool): Show timestamps +* tail (str or int): Output specified number of lines at the end of logs: `"all"` or `number`. Default `"all"` +* since (datetime or int): Show logs since a given datetime or integer epoch (in seconds) +* follow (bool): Follow log output + +**Returns** (generator or str): + +## networks + +List networks currently registered by the docker daemon. Similar to the `docker networks ls` command. + +**Params** + +* names (list): List of names to filter by +* ids (list): List of ids to filter by + +The above are combined to create a filters dict. + +**Returns** (dict): List of network objects. + +## pause + +Pauses all processes within a container. + +**Params**: + +* container (str): The container to pause + + +## ping + +Hits the `/_ping` endpoint of the remote API and returns the result. An +exception will be raised if the endpoint isn't responding. + +**Returns** (bool) + +## port +Lookup the public-facing port that is NAT-ed to `private_port`. Identical to +the `docker port` command. 
+ +**Params**: + +* container (str): The container to look up +* private_port (int): The private port to inspect + +**Returns** (list of dict): The mapping for the host ports + +```bash +$ docker run -d -p 80:80 ubuntu:14.04 /bin/sleep 30 +7174d6347063a83f412fad6124c99cffd25ffe1a0807eb4b7f9cec76ac8cb43b +``` +```python +>>> cli.port('7174d6347063', 80) +[{'HostIp': '0.0.0.0', 'HostPort': '80'}] +``` + +## pull + +Identical to the `docker pull` command. + +**Params**: + +* repository (str): The repository to pull +* tag (str): The tag to pull +* stream (bool): Stream the output as a generator +* insecure_registry (bool): Use an insecure registry +* auth_config (dict): Override the credentials that Client.login has set for this request + `auth_config` should contain the `username` and `password` keys to be valid. + +**Returns** (generator or str): The output + +```python +>>> from docker import Client +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> for line in cli.pull('busybox', stream=True): +... print(json.dumps(json.loads(line), indent=4)) +{ + "status": "Pulling image (latest) from busybox", + "progressDetail": {}, + "id": "e72ac664f4f0" +} +{ + "status": "Pulling image (latest) from busybox, endpoint: ...", + "progressDetail": {}, + "id": "e72ac664f4f0" +} +``` + +## push + +Push an image or a repository to the registry. Identical to the `docker push` +command. 
+ +**Params**: + +* repository (str): The repository to push to +* tag (str): An optional tag to push +* stream (bool): Stream the output as a blocking generator +* insecure_registry (bool): Use `http://` to connect to the registry + +**Returns** (generator or str): The output of the upload + +```python +>>> from docker import Client +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> response = [line for line in cli.push('yourname/app', stream=True)] +>>> response +['{"status":"Pushing repository yourname/app (1 tags)"}\\n', + '{"status":"Pushing","progressDetail":{},"id":"511136ea3c5a"}\\n', + '{"status":"Image already pushed, skipping","progressDetail":{}, + "id":"511136ea3c5a"}\\n', + ... + '{"status":"Pushing tag for rev [918af568e6e5] on { + https://cdn-registry-1.docker.io/v1/repositories/ + yourname/app/tags/latest}"}\\n'] +``` + +## put_archive + +Insert a file or folder in an existing container using a tar archive as source. + +**Params**: + +* container (str): The container where the file(s) will be extracted +* path (str): Path inside the container where the file(s) will be extracted. + Must exist. +* data (bytes): tar data to be extracted + +**Returns** (bool): True if the call succeeds. `docker.errors.APIError` will +be raised if an error occurs. + +## remove_container + +Remove a container. Similar to the `docker rm` command. + +**Params**: + +* container (str): The container to remove +* v (bool): Remove the volumes associated with the container +* link (bool): Remove the specified link and not the underlying container +* force (bool): Force the removal of a running container (uses SIGKILL) + +## remove_image + +Remove an image. Similar to the `docker rmi` command. + +**Params**: + +* image (str): The image to remove +* force (bool): Force removal of the image +* noprune (bool): Do not delete untagged parents + +## remove_network + +Remove a network. Similar to the `docker network rm` command. 
+
+**Params**:
+
+* net_id (str): The network's id
+
+Failure to remove will raise a `docker.errors.APIError` exception.
+
+## remove_volume
+
+Remove a volume. Similar to the `docker volume rm` command.
+
+**Params**:
+
+* name (str): The volume's name
+
+Failure to remove will raise a `docker.errors.APIError` exception.
+
+## rename
+
+Rename a container. Similar to the `docker rename` command.
+
+**Params**:
+
+* container (str): ID of the container to rename
+* name (str): New name for the container
+
+## resize
+
+Resize the tty session.
+
+**Params**:
+
+* container (str or dict): The container to resize
+* height (int): Height of tty session
+* width (int): Width of tty session
+
+## restart
+
+Restart a container. Similar to the `docker restart` command.
+
+If `container` is a dict, the `Id` key is used.
+
+**Params**:
+
+* container (str or dict): The container to restart
+* timeout (int): Number of seconds to try to stop for before killing the
+container. Once killed it will then be restarted. Default is 10 seconds.
+
+## search
+Identical to the `docker search` command.
+
+**Params**:
+
+* term (str): A term to search for
+
+**Returns** (list of dicts): The response of the search
+
+```python
+>>> from docker import Client
+>>> cli = Client(base_url='tcp://127.0.0.1:2375')
+>>> response = cli.search('nginx')
+>>> response[:2]
+[{'description': 'Official build of Nginx.',
+ 'is_official': True,
+ 'is_trusted': False,
+ 'name': 'nginx',
+ 'star_count': 266},
+ {'description': 'Trusted automated Nginx (http://nginx.org/) ...',
+ 'is_official': False,
+ 'is_trusted': True,
+ 'name': 'dockerfile/nginx',
+ 'star_count': 60},
+ ...
+```
+
+## start
+
+Similar to the `docker start` command, but doesn't support attach options. Use
+`.logs()` to recover `stdout`/`stderr`.
+ +**Params**: + +* container (str): The container to start + +**Deprecation warning:** For API version > 1.15, it is highly recommended to + provide host config options in the + [`host_config` parameter of `create_container`](#create_container) + +```python +>>> from docker import Client +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> container = cli.create_container( +... image='busybox:latest', +... command='/bin/sleep 30') +>>> response = cli.start(container=container.get('Id')) +>>> print(response) +None +``` + +## stats + +The Docker API parallel to the `docker stats` command. +This will stream statistics for a specific container. + +**Params**: + +* container (str): The container to stream statistics for +* decode (bool): If set to true, stream will be decoded into dicts on the + fly. False by default. +* stream (bool): If set to false, only the current stats will be returned + instead of a stream. True by default. + +```python +>>> from docker import Client +>>> cli = Client(base_url='tcp://127.0.0.1:2375') +>>> stats_obj = cli.stats('elasticsearch') +>>> for stat in stats_obj: +>>> print(stat) +{"read":"2015-02-11T21:47:30.49388286+02:00","networks":{"eth0":{"rx_bytes":648,"rx_packets":8 ... +... +... +... +``` + +## stop + +Stops a container. Similar to the `docker stop` command. + +**Params**: + +* container (str): The container to stop +* timeout (int): Timeout in seconds to wait for the container to stop before +sending a `SIGKILL` + +## tag + +Tag an image into a repository. Identical to the `docker tag` command. + +**Params**: + +* image (str): The image to tag +* repository (str): The repository to set for the tag +* tag (str): The tag name +* force (bool): Force + +**Returns** (bool): True if successful + +## top +Display the running processes of a container. 
+
+**Params**:
+
+* container (str): The container to inspect
+* ps_args (str): Optional arguments passed to ps (e.g., aux)
+
+**Returns** (str): The output of the top
+
+```python
+>>> from docker import Client
+>>> cli = Client(base_url='tcp://127.0.0.1:2375')
+>>> cli.create_container('busybox:latest', '/bin/sleep 30', name='sleeper')
+>>> cli.start('sleeper')
+>>> cli.top('sleeper')
+{'Processes': [['952', 'root', '/bin/sleep 30']],
+ 'Titles': ['PID', 'USER', 'COMMAND']}
+```
+
+## unpause
+
+Unpause all processes within a container.
+
+**Params**:
+
+* container (str): The container to unpause
+
+## update_container
+
+Update resource configs of one or more containers.
+
+**Params**:
+
+* container (str): The container to inspect
+* blkio_weight (int): Block IO (relative weight), between 10 and 1000
+* cpu_period (int): Limit CPU CFS (Completely Fair Scheduler) period
+* cpu_quota (int): Limit CPU CFS (Completely Fair Scheduler) quota
+* cpu_shares (int): CPU shares (relative weight)
+* cpuset_cpus (str): CPUs in which to allow execution
+* cpuset_mems (str): MEMs in which to allow execution
+* mem_limit (int or str): Memory limit
+* mem_reservation (int or str): Memory soft limit
+* memswap_limit (int or str): Total memory (memory + swap), -1 to disable swap
+* kernel_memory (int or str): Kernel memory limit
+
+**Returns** (dict): Dictionary containing a `Warnings` key.
+
+## version
+
+Nearly identical to the `docker version` command.
+
+**Returns** (dict): The server version information
+
+```python
+>>> from docker import Client
+>>> cli = Client(base_url='tcp://127.0.0.1:2375')
+>>> cli.version()
+{
+    "KernelVersion": "3.16.4-tinycore64",
+    "Arch": "amd64",
+    "ApiVersion": "1.15",
+    "Version": "1.3.0",
+    "GitCommit": "c78088f",
+    "Os": "linux",
+    "GoVersion": "go1.3.3"
+}
+```
+
+## volumes
+
+List volumes currently registered by the docker daemon. Similar to the `docker volume ls` command.
+ +**Params** + +* filters (dict): Server-side list filtering options. + +**Returns** (dict): Dictionary with list of volume objects as value of the `Volumes` key. + +```python +>>> cli.volumes() +{u'Volumes': [ + {u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', u'Driver': u'local', u'Name': u'foobar'}, + {u'Mountpoint': u'/var/lib/docker/volumes/baz/_data', u'Driver': u'local', u'Name': u'baz'} +]} +``` + +## wait +Identical to the `docker wait` command. Block until a container stops, then +return its exit code. Returns the value `-1` if the API responds without a +`StatusCode` attribute. + +If `container` is a dict, the `Id` key is used. + +If the timeout value is exceeded, a `requests.exceptions.ReadTimeout` +exception will be raised. + +**Params**: + +* container (str or dict): The container to wait on +* timeout (int): Request timeout + +**Returns** (int): The exit code of the container + + + diff --git a/testbed/docker__docker-py/docs/boot2docker.md b/testbed/docker__docker-py/docs/boot2docker.md new file mode 100644 index 0000000000000000000000000000000000000000..4854e4142500dc2480d88a832370a82b2952e2c9 --- /dev/null +++ b/testbed/docker__docker-py/docs/boot2docker.md @@ -0,0 +1,38 @@ +# Using with Boot2docker + +For usage with boot2docker, there is a helper function in the utils package named `kwargs_from_env`, it will pass any environment variables from Boot2docker to the Client. 
+ +First run boot2docker in your shell: +```bash +$ eval "$(boot2docker shellinit)" +Writing /Users/you/.boot2docker/certs/boot2docker-vm/ca.pem +Writing /Users/you/.boot2docker/certs/boot2docker-vm/cert.pem +Writing /Users/you/.boot2docker/certs/boot2docker-vm/key.pem +``` + +You can then instantiate `docker.Client` like this: +```python +from docker.client import Client +from docker.utils import kwargs_from_env + +cli = Client(**kwargs_from_env()) +print cli.version() +``` + +If you're encountering the following error: +`SSLError: hostname '192.168.59.103' doesn't match 'boot2docker'`, you can: + +1. Add an entry to your /etc/hosts file matching boot2docker to the daemon's IP +1. disable hostname validation (but please consider the security implications + in doing this) + +```python +from docker.client import Client +from docker.utils import kwargs_from_env + +kwargs = kwargs_from_env() +kwargs['tls'].assert_hostname = False + +cli = Client(**kwargs) +print cli.version() +``` \ No newline at end of file diff --git a/testbed/docker__docker-py/docs/change_log.md b/testbed/docker__docker-py/docs/change_log.md new file mode 100644 index 0000000000000000000000000000000000000000..c5637d685197c6ab8f501afb60d915ceb1ce102c --- /dev/null +++ b/testbed/docker__docker-py/docs/change_log.md @@ -0,0 +1,857 @@ +Change Log +========== + +1.7.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.7.2+is%3Aclosed) + +### Bugfixes + +* Fixed a bug where TLS verification was improperly executed when providing + a custom CA certificate. + +1.7.1 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.7.1+is%3Aclosed) + +### Features + +* Added support for `shm_size` in `Client.create_host_config` + +### Bugfixes + +* Fixed a bug where Dockerfile would sometimes be excluded from the build + context. 
+* Fixed a bug where a docker config file containing unknown keys would raise
+  an exception.
+* Fixed an issue with SSL connections behaving improperly when pyOpenSSL
+  was installed in the same environment.
+* Several TLS configuration improvements
+
+
+1.7.0
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.7.0+is%3Aclosed)
+
+### Features
+
+* Added support for custom IPAM configuration in `Client.create_network`
+* Added input support to `Client.exec_create`
+* Added support for `stop_signal` in `Client.create_host_config`
+* Added support for custom HTTP headers in Docker config file.
+* Added support for unspecified transfer protocol in `base_url` when TLS is
+  enabled.
+
+
+### Bugfixes
+
+* Fixed a bug where the `filters` parameter in `Client.volumes` would not be
+  applied properly.
+* Fixed a bug where memory limits would parse to incorrect values.
+* Fixed a bug where the `devices` parameter in `Client.create_host_config`
+  would sometimes be misinterpreted.
+* Fixed a bug where instantiating a `Client` object would sometimes crash if
+  `base_url` was unspecified.
+* Fixed a bug where an error message related to TLS configuration would link
+  to a non-existent (outdated) docs page.
+
+
+### Miscellaneous
+
+* Processing of `.dockerignore` has been made significantly faster.
+* Dropped explicit support for Python 3.2 + +1.6.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.6.0+is%3Aclosed) + +### Features + +* Added support for the `since` param in `Client.logs` (introduced in API + version 1.19) +* Added support for the `DOCKER_CONFIG` environment variable when looking up + auth config +* Added support for the `stream` param in `Client.stats` (when set to `False`, + allows user to retrieve a single snapshot instead of a constant data stream) +* Added support for the `mem_swappiness`, `oom_kill_disable` params + in `Client.create_host_config` +* Added support for build arguments in `Client.build` through the `buildargs` + param. + + +### Bugfixes + +* Fixed a bug where streaming data over HTTPS would sometimes behave + incorrectly with Python 3.x +* Fixed a bug where commands containing unicode characters would be incorrectly + handled by `Client.create_container`. +* Fixed a bug where auth config credentials containing unicode characters would + cause failures when pushing / pulling images. +* Setting `tail=0` in `Client.logs` no longer shows past logs. +* Fixed a bug where `Client.pull` and `Client.push` couldn't handle image names + containing a dot. + + +### Miscellaneous + +* Default API version is now 1.21 (introduced in Docker 1.9.0) +* Several test improvements and cleanup that should make the suite easier to + expand and maintain moving forward. + + +1.5.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.5.0+is%3Aclosed) + +### Features + +* Added support for the networking API introduced in Docker 1.9.0 + (`Client.networks`, `Client.create_network`, `Client.remove_network`, + `Client.inspect_network`, `Client.connect_container_to_network`, + `Client.disconnect_container_from_network`). 
+* Added support for the volumes API introduced in Docker 1.9.0
+  (`Client.volumes`, `Client.create_volume`, `Client.inspect_volume`,
+  `Client.remove_volume`).
+* Added support for the `group_add` parameter in `create_host_config`.
+* Added support for the CPU CFS (`cpu_quota` and `cpu_period`) parameters
+  in `create_host_config`.
+* Added support for the archive API endpoint (`Client.get_archive`,
+  `Client.put_archive`).
+* Added support for `ps_args` parameter in `Client.top`.
+
+
+### Bugfixes
+
+* Fixed a bug where specifying volume binds with unicode characters would
+  fail.
+* Fixed a bug where providing an explicit protocol in `Client.port` would fail
+  to yield the expected result.
+* Fixed a bug where the priority protocol returned by `Client.port` would be UDP
+  instead of the expected TCP.
+
+### Miscellaneous
+
+* Broke up Client code into several files to facilitate maintenance and
+  contribution.
+* Added contributing guidelines to the repository.
+
+1.4.0
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.4.0+is%3Aclosed)
+
+### Deprecation warning
+
+* `docker.utils.create_host_config` is deprecated in favor of
+  `Client.create_host_config`.
+
+### Features
+
+* Added `utils.parse_env_file` to support env-files.
+  See [docs](http://docker-py.readthedocs.org/en/latest/api/#create_container)
+  for usage.
+* Added support for arbitrary log drivers
+* Added support for URL paths in the docker host URL (`base_url`)
+* Drastically improved support for .dockerignore syntax
+
+### Bugfixes
+
+* Fixed a bug where exec_inspect would allow invocation when the API version
+  was too low.
+* Fixed a bug where `docker.utils.ports.split_port` would break if an open
+  range was provided.
+* Fixed a bug where invalid image IDs / container IDs could be provided to + bypass or reroute request URLs +* Default `base_url` now adapts depending on the OS (better Windows support) +* Fixed a bug where using an integer as the user param in + `Client.create_container` would result in a failure. + +### Miscellaneous + +* Docs fixes +* Integration tests are now run as part of our continuous integration. +* Updated dependency on `six` library + +1.3.1 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.3.1+is%3Aclosed) + +### Bugfixes + +* Fixed a bug where empty chunks in streams was misinterpreted as EOF. +* `datetime` arguments passed to `Client.events` parameters `since` and + `until` are now always considered to be UTC. +* Fixed a bug with Docker 1.7.x where the wrong auth headers were being passed + in `Client.build`, failing builds that depended on private images. +* `Client.exec_create` can now retrieve the `Id` key from a dictionary for its + container param. + +### Miscellaneous + +* 404 API status now raises `docker.errors.NotFound`. This exception inherits + `APIError` which was used previously. +* Docs fixes +* Test fixes + +1.3.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.3.0+is%3Aclosed) + +### Deprecation warning + +* As announced in the 1.2.0 release, `Client.execute` has been removed in favor + of `Client.exec_create` and `Client.exec_start`. + +### Features + +* `extra_hosts` parameter in host config can now also be provided as a list. +* Added support for `memory_limit` and `memswap_limit` in host config to + comply with recent deprecations. 
+* Added support for `volume_driver` in `Client.create_container` +* Added support for advanced modes in volume binds (using the `mode` key) +* Added support for `decode` in `Client.build` (decodes JSON stream on the fly) +* docker-py will now look for login configuration under the new config path, + and fall back to the old `~/.dockercfg` path if not present. + +### Bugfixes + +* Configuration file lookup now also work on platforms that don't define a + `$HOME` environment variable. +* Fixed an issue where pinging a v2 private registry wasn't working properly, + preventing users from pushing and pulling. +* `pull` parameter in `Client.build` now defaults to `False`. Fixes a bug where + the default options would try to force a pull of non-remote images. +* Fixed a bug where getting logs from tty-enabled containers wasn't working + properly with more recent versions of Docker +* `Client.push` and `Client.pull` will now raise exceptions if the HTTP + status indicates an error. +* Fixed a bug with adapter lookup when using the Unix socket adapter + (this affected some weird edge cases, see issue #647 for details) +* Fixed a bug where providing `timeout=None` to `Client.stop` would result + in an exception despite the usecase being valid. +* Added `git@` to the list of valid prefixes for remote build paths. + +### Dependencies + +* The websocket-client dependency has been updated to a more recent version. + This new version also supports Python 3.x, making `attach_socket` available + on those versions as well. + +### Documentation + +* Various fixes + +1.2.3 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.2.3+is%3Aclosed) + +### Deprecation warning + +* Passing host config in the `Client.start` method is now deprecated. Please + use the `host_config` in `Client.create_container` instead. 
+ +### Features + +* Added support for `privileged` param in `Client.exec_create` + (only available in API >= 1.19) +* Volume binds can now also be specified as a list of strings. + +### Bugfixes + +* Fixed a bug where the `read_only` param in host_config wasn't handled + properly. +* Fixed a bug in `Client.execute` (this method is still deprecated). +* The `cpuset` param in `Client.create_container` is also passed as + the `CpusetCpus` param (`Cpuset` deprecated in recent versions of the API) +* Fixed an issue with integration tests being run inside a container + (`make integration-test`) +* Fixed a bug where an empty string would be considered a valid container ID + or image ID. +* Fixed a bug in `Client.insert` + + +### Documentation + +* Various fixes + +1.2.2 +----- + +### Bugfixes + +* Fixed a bug where parameters passed to `Client.exec_resize` would be ignored (#576) +* Fixed a bug where auth config wouldn't be resolved properly in `Client.pull` (#577) + +1.2.1 +----- + +### Bugfixes + +* Fixed a bug where the check_resource decorator would break with some + argument-passing methods. (#573) + +1.2.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/issues?q=milestone%3A1.2.0+is%3Aclosed) + +### Deprecation warning + +* `Client.execute` is being deprecated in favor of the more dev-friendly + `Client.exec_start` and `Client.exec_create`. **It will be removed in 1.3.0** + +### Features + +* Added `exec_create`, `exec_start`, `exec_inspect` and `exec_resize` to + client, accurately mirroring the + [Exec API](https://docs.docker.com/reference/api/docker_remote_api_v1.18/#exec-create) +* Added `auth_config` param to `Client.pull` (allows to use one-off credentials + for this pull request) +* Added support for `ipc_mode` in host config. +* Added support for the `log_config` param in host config. +* Added support for the `ulimit` param in host config. +* Added support for container resource limits in `Client.build`. 
+* When a resource identifier (image or container ID) is passed to a Client + method, we now check for `None` values to avoid crashing + (now raises `docker.errors.NullResource`) +* Added tools to parse port ranges inside the new `docker.utils.ports` package. +* Added a `version_info` attribute to the `docker` package. + +### Bugfixes + +* Fixed a bug in `Client.port` where absence of a certain key in the + container's JSON would raise an error (now just returns `None`) +* Fixed a bug with the `trunc` parameter in `Client.containers` having no + effect (moved functionality to the client) +* Several improvements have been made to the `Client.import_image` method. +* Fixed pushing / pulling to + [v2 registries](https://github.com/docker/distribution) +* Fixed a bug where passing a container dictionary to `Client.commit` + would fail + +### Miscellaneous + +* Default API version has been bumped to 1.18 (Docker Engine 1.6.0) +* Several testing coverage improvements +* Docs fixes and improvements + +1.1.0 +----- + +### Features + +* Added `dockerfile` param support to `Client.build` (mirrors + `docker build -f` behavior) +* Added the ability to specify `'auto'` as `version` in `Client.__init__`, + allowing the constructor to autodetect the daemon's API version. + +### Bugfixes + +* Fixed a bug where decoding a result stream using the `decode` parameter + would break when using Python 3.x +* Fixed a bug where some files in `.dockerignore` weren't being handled + properly +* Fixed `resolve_authconfig` issues by bringing it closer to Docker Engine's + behavior. This should fix all issues encountered with private registry auth +* Fixed an issue where passwords containing a colon weren't being handled + properly. +* Bumped `requests` version requirement, which should fix most of the SSL + issues encountered recently. + +### Miscellaneous + +* Several integration test improvements. +* Fixed some unclosed resources in unit tests. +* Several docs improvements. 
+
+1.0.0
+-----
+
+### Features
+
+* Added new `Client.rename` method (`docker rename`)
+* Added new `Client.stats` method (`docker stats`)
+* Added `read_only` param support to `utils.create_host_config` and
+  `Client.start` (`docker run --read-only`)
+* Added `pid_mode` param support to `utils.create_host_config` and
+  `Client.start` (`docker run --pid='host'`)
+* Added `since`, `until` and `filters` params to `Client.events`.
+* Added `decode` parameter to `Client.stats` and `Client.events` to decode
+  JSON objects on the fly (False by default).
+
+### Bugfixes
+
+* Fixed a bug that caused `Client.build` to crash when the provided source was
+  a remote source.
+
+### Miscellaneous
+
+* Default API version has been bumped to 1.17 (Docker Engine 1.5.0)
+* `Client.timeout` is now a public attribute, and users are encouraged to use it
+  when request timeouts need to be changed at runtime.
+* Added `Client.api_version` as a read-only property.
+* The `memswap_limit` argument in `Client.create_container` now accepts string
+  type values similar to `mem_limit` ('6g', '120000k', etc.)
+* Improved documentation
+
+0.7.2
+-----
+
+### Features
+
+* Added support for `mac_address` in `Client.create_container`
+
+### Bugfixes
+
+* Fixed a bug where streaming responses (`pull`, `push`, `logs`, etc.) were
+  unreliable (#300)
+* Fixed a bug where resolve_authconfig wouldn't properly resolve configuration
+  for private repositories (#468)
+* Fixed a bug where some errors wouldn't be properly constructed in
+  `client.py`, leading to unhelpful exceptions bubbling up (#466)
+* Fixed a bug where `Client.build` would try to close context when externally
+  provided (`custom_context == True`) (#458)
+* Fixed an issue in `create_host_config` where empty sequences wouldn't be
+  interpreted properly (#462)
+
+### Miscellaneous
+
+* Added `resolve_authconfig` tests.
+ +0.7.1 +----- + +### Bugfixes + +* `setup.py` now indicates a maximum version of requests to work around the + boot2docker / `assert_hostname` bug. +* Removed invalid exception when using the Registry Hub's FQDN when pulling. +* Fixed an issue where early HTTP errors weren't handled properly in streaming + responses. +* Fixed a bug where sockets would close unexpectedly using Python 3.x +* Various fixes for integration tests. + +### Miscellaneous + +* Small doc fixes + +0.7.0 +----- + +### Breaking changes + +* Passing `dns` or `volumes_from` in `Client.start` with API version < 1.10 + will now raise an exception (previously only triggered a warning) + +### Features + +* Added support for `host_config` in `Client.create_container` +* Added utility method `docker.utils.create_host_config` to help build a + proper `HostConfig` dictionary. +* Added support for the `pull` parameter in `Client.build` +* Added support for the `forcerm` parameter in `Client.build` +* Added support for `extra_hosts` in `Client.start` +* Added support for a custom `timeout` in `Client.wait` +* Added support for custom `.dockercfg` loading in `Client.login` + (`dockercfg_path` argument) + +### Bugfixes + +* Fixed a bug where some output wouldn't be streamed properly in streaming + chunked responses +* Fixed a bug where the `devices` param didn't recognize the proper delimiter +* `Client.login` now properly expands the `registry` URL if provided. +* Fixed a bug where unicode characters in passed for `environment` in + `create_container` would break. + +### Miscellaneous + +* Several unit tests and integration tests improvements. +* `Client` constructor now enforces passing the `version` parameter as a + string. 
+* Build context files are now ordered by filename when creating the archive
+  (for consistency with docker mainline behavior)
+
+0.6.0
+-----
+* **This version introduces breaking changes!**
+
+### Breaking changes
+
+* The default SSL protocol is now the highest TLS v1.x (was SSL v2.3 before)
+  (Poodle fix)
+* The `history` command now returns a dict instead of a raw JSON string.
+
+### Features
+
+* Added the `execute` command.
+* Added `pause` and `unpause` commands.
+* Added support for the `cpuset` param in `create_container`
+* Added support for host devices (`devices` param in `start`)
+* Added support for the `tail` param in `logs`.
+* Added support for the `filters` param in `images` and `containers`
+* The `kwargs_from_env` method is now available in the `docker.utils`
+  module. This should make it easier for boot2docker users to connect
+  to their daemon.
+
+### Bugfixes
+
+* Fixed a bug where empty directories weren't correctly included when
+  providing a context to `Client.build`.
+* Fixed a bug where UNIX socket connections weren't properly cleaned up,
+  causing `ResourceWarning`s to appear in some cases.
+* Fixed a bug where docker-py would crash if the docker daemon was stopped
+  while reading a streaming response
+* Fixed a bug with streaming responses in Python 3
+* `remove_image` now supports a dict containing an `Id` key as its `id`
+  parameter (similar to other methods requiring a resource ID)
+
+### Documentation
+
+* Added new MkDocs documentation. Currently hosted on
+  [ReadTheDocs](http://docker-py.readthedocs.org/en/latest/)
+
+### Miscellaneous
+
+* Added tests to sdist
+* Added a Makefile for running tests in Docker
+* Updated Dockerfile
+
+0.5.3
+-----
+
+* Fixed attaching when connecting to the daemon over a UNIX socket.
+
+0.5.2
+-----
+
+* Fixed a bug where sockets were closed immediately when attaching over
+  TLS.
+ +0.5.1 +----- + +* Added a `assert_hostname` option to `TLSConfig` which can be used to + disable verification of hostnames. +* Fixed SSL not working due to an incorrect version comparison +* Fixed streams not working on Windows + +0.5.0 +----- + +* **This version introduces breaking changes!** +* Added `insecure_registry` parameter in `Client.push` and `Client.pull`. + *It defaults to False and code pushing to non-HTTPS private registries + might break as a result.* +* Added support for adding and dropping capabilities +* Added support for restart policy +* Added support for string values in `Client.create_container`'s `mem_limit` +* Added support for `.dockerignore` file in `Client.build` + +### Bugfixes + +* Fixed timeout behavior in `Client.stop` + +### Miscellaneous + +* `Client.create_container` provides better validation of the `volumes` + parameter +* Improved integration tests + +0.4.0 +----- + +* **This version introduces breaking changes!** +* The `base_url` parameter in the `Client` constructor should now allow most + of the `DOCKER_HOST` environment values (except for the fd:// protocol) + * As a result, URLs that don't specify a port are now invalid (similar + to the official client's behavior) +* Added TLS support (see [documentation](https://github.com/dotcloud/docker-py#connection-to-daemon-using-https)) + +### Bugfixes + +* Fixed an issue with `Client.build` streamed logs in Python 3 + +### Miscellaneous + +* Added unit tests coverage +* Various integration tests fixes + +0.3.2 +----- + +* Default API version is now 1.12 (support for docker 1.0) +* Added new methods `Client.get_image` and `Client.load_image` + (`docker save` and `docker load`) +* Added new method `Client.ping` +* Added new method `Client.resize` +* `Client.build` can now be provided with a custom context using the + `custom_context` parameter. 
+* Added support for `memswap_limit` parameter in `create_container` +* Added support for `force` parameter in `remove_container` +* Added support for `force` and `noprune` parameters in `remove_image` +* Added support for `timestamps` parameter in `logs` +* Added support for `dns_search` parameter in `start` +* Added support for `network_mode` parameter in `start` +* Added support for `size` parameter in `containers` +* Added support for `volumes_from` and `dns` parameters in `start`. As of + API version >= 1.10, these parameters no longer belong to `create_container` +* `Client.logs` now uses the logs endpoint when API version is sufficient + +### Bugfixes + +* Fixed a bug in pull where the `repo:tag` notation wasn't interpreted + properly +* Fixed a bug in streaming methods with python 3 (unicode, bytes/str related) +* Fixed a bug in `Client.start` where legacy notation for volumes wasn't + supported anymore. + +### Miscellaneous + +* The client now raises `DockerException`s when appropriate. You can import + `DockerException` (and its subclasses) from the `docker.errors` module to + catch them if needed. +* `docker.APIError` has been moved to the new `docker.errors` module as well. +* `Client.insert` is deprecated in API version > 1.11 +* Improved integration tests should now run much faster. +* There is now a single source of truth for the docker-py version number. + +0.3.1 +----- + +* Default API version is now 1.9 +* Streaming responses no longer yield blank lines. +* `Client.create_container` now supports the `domainname` parameter. +* `volumes_from` parameter in `Client.create_container` now supports + iterables. +* Auth credentials are provided to the docker daemon when using `Client.build` + (new feature in API version 1.9) + + +### Bugfixes + +* Various fixes for response streams (`logs`, `pull`, etc.). +* Fixed a bug with `Client.push` when using API version < 1.5 +* Fixed a bug with API version checks. 
+ +### Miscellaneous + +* `mock` has been removed from the runtime requirements. +* Added installation instructions in the README. + +0.3.0 +----- + +* **This version introduces breaking changes!** +* Support for API version 1.7 through 1.9 (Docker 0.8.0+) +* Default API version is now 1.8 +* The client has been updated to support Requests 2.x. `requests==2.2.1` + is now the recommended version. +* Links can now be specified as tuples in `Client.start` (see docs for + more information) +* Added support for various options in `Client.create_container` + (`network_disabled`, `cpu_shares`, `working_dir` and `entrypoint`) +* `Client.attach` has been reworked to work similarly to `Client.logs` + minus the historical data. +* Logs can now be streamed using the `stream` parameter. +* Added support for `tcp://` URLs as client `base_url`. +* Various auth improvements. +* Added support for custom `Client.build` timeout. + + +### Bugfixes + +* Fixed a bug where determining the protocol of a private registry + would sometimes yield the wrong result. +* Fixed a bug where `Client.copy` wouldn't accept a dict as argument. +* Fixed several streaming bugs. +* Removed unused parameter in `Client.import_image`. +* The client's `base_url` now tolerates trailing slashes. + +### Miscellaneous + +* Updated integration tests +* Small doc fixes + +0.2.3 +----- + +* Support for API version 1.6 +* Added support for links +* Added support for global request timeout +* Added `signal` parameter in `Client.kill` +* Added support for `publish_all_ports` in `Client.start` +* `Client.pull`, `Client.push` and `Client.build` can be streamed now +* Added support for websockets in `Client.attach` +* Fixed ports for Docker 0.6.5+ +* Added `Client.events` method (access to the `/events` endpoint) +* Changed the way the ports and volumes are provided in `Client.start` and + `Client.create_container` to make them simpler and more intuitive.
+ +### Bugfixes + +* Fixed a bug where private registries on HTTPS weren't handled properly +* Fixed a bug where auth would break with Python 3 + +### Miscellaneous + +* Test improvements +* Slight doc improvements + + +0.2.2 +----- + +* Added support for the `rm` parameter in `Client.build` +* Added support for tarball imports in `Client.import_image` through `data` + parameter. +* The `command` parameter in `Client.create_container` is now optional (for + containers that include a default run command) + +### Bugfixes + +* Fixed Python 3 support +* Fixed a bug where anonymous push/pull would break when no authconfig is + present +* Fixed a bug where the `quiet` parameter wouldn't be taken into account in + `Client.containers` +* Fixed a bug where `Client.push` would break when pushing to private + registries. +* Removed unused `registry` parameter in `Client.pull`. +* Removed obsolete custom error message in `Client.create_container`. + +### Miscellaneous + +* docker-py is now unit-tested, and Travis-CI has been enabled on the + source repository. + +0.2.1 +----- + +* Improvements to the `tox.ini` file + +### Bugfixes + +* Fixed a bug where the package would fail with an `ImportError` if requests + was installed using `apt-get` +* Fixed a bug where `Client.build` would fail if given a `path` parameter. +* Fixed several bugs in `Client.login`. It should now work with API versions + 1.4, 1.5. +* Please note that `Client.login` currently doesn't write auth to the + `.dockercfg` file, thus **auth is not persistent when using this method.** + +0.2.0 +----- + +* **This version introduces breaking changes!** +* `Client.kill`, `Client.remove_container`, `Client.remove_image`, +`Client.restart`, `Client.start`, `Client.stop` and `Client.wait` don't support +varargs anymore. 
+* Added commands `Client.top` and `Client.copy` +* Added `lxc_conf` parameter to `Client.start` +* Added support for authentication in `Client.pull` (API version >=1.5) +* Added support for privileged containers. +* Error management overhaul. The new version should be more consistent and easier to handle. +* All methods that expected a container ID as argument now also support a dict +containing an `Id` key. +* Added license header to python files. +* Several `README.md` updates. + +### Bugfixes + +* Fixed several bugs with auth config parsing. +* Fixed a bug in `Client.push` where it would raise an exception if +the auth config wasn't loaded. +* Fixed a bug in `Client.pull` where private registry images wouldn't be parsed +properly if it contained port information. + + +0.1.5 +----- + +* `Client.build` now uses tempfiles to store build context instead of storing +it in memory +* Added `nocache` option to `Client.build` +* `Client.remove_container` now raises an exception when trying to remove a +running container +* `Client.create_container` now accepts dicts for the `environment` parameter + +### Bugfixes + +* Fixed a bug in `Client.create_container` on Python 2.6 where unicode +commands would fail to be parsed +* Fixed a bug in `Client.build` where the `tag` parameter would not be taken +into account + +0.1.4 +----- + +* Added support for API connection through UNIX socket (default for docker 0.5.2+) + +0.1.3 +----- + +* The client now tries to load the auth config from `~/.dockercfg`.
This is necessary to use the push command if API version is >1.0 + +0.1.2 +----- + +* Added a `quiet` parameter to `Client.build` (mirrors the `q` parameter in the API) + +0.1.1 +----- + +* Fixed a bug where the build command would list tar contents before sending the request +* Fixed a bug in `Client.port` + + +0.1.0 +----- +* **This version introduces breaking changes!** +* Switched to server side build system +* Removed the BuilderClient +* Added support for contextual builds +* Added support for remote URL builds +* Added python 3 support +* Added bind mounts support +* Added API version support +* Fixed a bug where `Client.port` would fail if provided with a port of type number +* Fixed a bug where `Client._post_json` wouldn't set the Content-Type header to `application/json` + +0.0.6 +----- +* Added support for custom loggers in `Client.build` +* Added `Client.attach` command +* Added support for `ADD` command in builder +* Fixed a bug in `Client.logs` +* Improved unit tests + + +0.0.5 +----- +* Added tag support for the builder +* Use `shlex` to parse plain string commands when creating a container +* Fixed several bugs in the builder +* Fixed the `quiet` option in `Client.images` +* Unit tests + +0.0.4 +----- +* Improved error reporting + +0.0.3 +----- +* Fixed a bug in `Client.tag` +* Fixed a bug where generated images would be removed after a successful build + +0.0.2 +----- +* Implemented first version of the builder client diff --git a/testbed/docker__docker-py/docs/contributing.md b/testbed/docker__docker-py/docs/contributing.md new file mode 100644 index 0000000000000000000000000000000000000000..e776458338464657e0880afd479317bb2ed7e77b --- /dev/null +++ b/testbed/docker__docker-py/docs/contributing.md @@ -0,0 +1,36 @@ +# Contributing +See the [Docker contributing guidelines](https://github.com/docker/docker/blob/master/CONTRIBUTING.md). +The following is specific to docker-py.
+ +## Running the tests & Code Quality + + +To get the source code and run the unit tests, run: +``` +$ git clone git://github.com/docker/docker-py.git +$ cd docker-py +$ pip install tox +$ tox +``` + +## Building the docs +Docs are built with [MkDocs](http://www.mkdocs.org/). For development, you can +run the following in the project directory: +``` +$ pip install -r docs-requirements.txt +$ mkdocs serve +``` + +## Release Checklist + +Before a new release, please go through the following checklist: + +* Bump version in docker/version.py +* Add a release note in docs/change_log.md +* Git tag the version +* Upload to pypi + +## Vulnerability Reporting +For any security issues, please do NOT file an issue or pull request on GitHub! +Please contact [security@docker.com](mailto:security@docker.com) or read [the +Docker security page](https://www.docker.com/resources/security/). diff --git a/testbed/docker__docker-py/docs/host-devices.md b/testbed/docker__docker-py/docs/host-devices.md new file mode 100644 index 0000000000000000000000000000000000000000..150a686255c3ab0b7cb601e340cb7e0addd29156 --- /dev/null +++ b/testbed/docker__docker-py/docs/host-devices.md @@ -0,0 +1,29 @@ +# Access to devices on the host + +If you need to directly expose some host devices to a container, you can use +the devices parameter in the `host_config` param in `Client.create_container` +as shown below: + +```python +cli.create_container( + 'busybox', 'true', host_config=cli.create_host_config(devices=[ + '/dev/sda:/dev/xvda:rwm' + ]) +) +``` + +Each string is a single mapping using the following format: +`<path_on_host>:<path_in_container>:<cgroup_permissions>` +The above example allows the container to have read-write access to +the host's `/dev/sda` via a node named `/dev/xvda` inside the container.
+ +As a more verbose alternative, each host device definition can be specified as +a dictionary with the following keys: + +```python +{ + 'PathOnHost': '/dev/sda1', + 'PathInContainer': '/dev/xvda', + 'CgroupPermissions': 'rwm' +} +``` diff --git a/testbed/docker__docker-py/docs/hostconfig.md b/testbed/docker__docker-py/docs/hostconfig.md new file mode 100644 index 0000000000000000000000000000000000000000..4b841d51a428ba5cee17dbd6167957af04026bb0 --- /dev/null +++ b/testbed/docker__docker-py/docs/hostconfig.md @@ -0,0 +1,122 @@ +# HostConfig object + +The Docker Remote API introduced [support for HostConfig in version 1.15](http://docs.docker.com/reference/api/docker_remote_api_v1.15/#create-a-container). +This object contains all the parameters you could previously pass to `Client.start`. +*It is highly recommended that users pass the HostConfig in the `host_config`* +*param of `Client.create_container` instead of `Client.start`* + +## HostConfig helper + +### Client.create_host_config + +Creates a HostConfig dictionary to be used with `Client.create_container`. + +`binds` allows to bind a directory in the host to the container. See [Using +volumes](volumes.md) for more information. + +`port_bindings` exposes container ports to the host. +See [Port bindings](port-bindings.md) for more information. + +`lxc_conf` allows to pass LXC configuration options using a dictionary. + +`privileged` starts the container in privileged mode. + +[Links](http://docs.docker.io/en/latest/use/working_with_links_names/) can be +specified with the `links` argument. They can either be specified as a +dictionary mapping name to alias or as a list of `(name, alias)` tuples. + +`dns` and `volumes_from` are only available if they are used with version v1.10 +of docker remote API. Otherwise they are ignored. 
+ +`network_mode` is available since v1.11 and sets the Network mode for the +container ('bridge': creates a new network stack for the container on the +Docker bridge, 'none': no networking for this container, 'container:[name|id]': +reuses another container network stack, 'host': use the host network stack +inside the container or any name that identifies an existing Docker network). + +`restart_policy` is available since v1.2.0 and sets the container's *RestartPolicy* +which defines the conditions under which a container should be restarted upon exit. +If no *RestartPolicy* is defined, the container will not be restarted when it exits. +The *RestartPolicy* is specified as a dict. For example, if the container +should always be restarted: +```python +{ + "MaximumRetryCount": 0, + "Name": "always" +} +``` + +It is possible to restart the container only on failure as well as limit the number +of restarts. For example: +```python +{ + "MaximumRetryCount": 5, + "Name": "on-failure" +} +``` + +`cap_add` and `cap_drop` are available since v1.2.0 and can be used to add or +drop certain capabilities. The user may specify the capabilities as an array +for example: +```python +[ + "SYS_ADMIN", + "MKNOD" +] +``` + + +**Params** + +* binds: Volumes to bind. See [Using volumes](volumes.md) for more information. +* port_bindings (dict): Port bindings. See [Port bindings](port-bindings.md) + for more information. +* lxc_conf (dict): LXC config +* oom_kill_disable (bool): Whether to disable OOM killer +* publish_all_ports (bool): Whether to publish all ports to the host +* links (dict or list of tuples): either as a dictionary mapping name to alias + or as a list of `(name, alias)` tuples +* privileged (bool): Give extended privileges to this container +* dns (list): Set custom DNS servers +* dns_search (list): DNS search domains +* volumes_from (str or list): List of container names or Ids to get volumes + from. 
Optionally a single string joining container id's with commas +* network_mode (str): One of `['bridge', 'none', 'container:<name|id>', 'host']` +* restart_policy (dict): "Name" param must be one of + `['on-failure', 'always']` +* cap_add (list of str): Add kernel capabilities +* cap_drop (list of str): Drop kernel capabilities +* extra_hosts (dict): custom host-to-IP mappings (host:ip) +* read_only (bool): mount the container's root filesystem as read only +* pid_mode (str): if set to "host", use the host PID namespace inside the + container +* ipc_mode (str): Set the IPC mode for the container +* security_opt (list): A list of string values to customize labels for MLS + systems, such as SELinux. +* ulimits (list): A list of dicts or `docker.utils.Ulimit` objects. A list + of ulimits to be set in the container. +* log_config (`docker.utils.LogConfig` or dict): Logging configuration to + container +* mem_limit (str or int): Maximum amount of memory container is allowed to + consume. (e.g. `'1G'`) +* memswap_limit (str or int): Maximum amount of memory + swap a container is + allowed to consume. +* mem_swappiness (int): Tune a container's memory swappiness behavior. + Accepts number between 0 and 100. +* shm_size (str or int): Size of /dev/shm. (e.g. `'1G'`) +* cpu_group (int): The length of a CPU period in microseconds. +* cpu_period (int): Microseconds of CPU time that the container can get in a + CPU period. +* group_add (list): List of additional group names and/or IDs that the + container process will run as. +* devices (list): Host device bindings. See [host devices](host-devices.md) + for more information.
+ +**Returns** (dict) HostConfig dictionary + +```python +>>> from docker import Client +>>> cli = Client() +>>> cli.create_host_config(privileged=True, cap_drop=['MKNOD'], volumes_from=['nostalgic_newton']) +{'CapDrop': ['MKNOD'], 'LxcConf': None, 'Privileged': True, 'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False} +``` diff --git a/testbed/docker__docker-py/docs/index.md b/testbed/docker__docker-py/docs/index.md new file mode 100644 index 0000000000000000000000000000000000000000..5b851f0a4bc6abed766bfa1da85578167cf359ee --- /dev/null +++ b/testbed/docker__docker-py/docs/index.md @@ -0,0 +1,15 @@ +# docker-py documentation + +An API client for docker written in Python + +## Installation + +Our latest stable is always available on PyPi. + + pip install docker-py + +## Documentation +Full documentation is available in the `/docs/` directory. + +## License +Docker is licensed under the Apache License, Version 2.0. See LICENSE for full license text diff --git a/testbed/docker__docker-py/docs/networks.md b/testbed/docker__docker-py/docs/networks.md new file mode 100644 index 0000000000000000000000000000000000000000..41935377e659b3ed57f1516b970de37b5d218d36 --- /dev/null +++ b/testbed/docker__docker-py/docs/networks.md @@ -0,0 +1,20 @@ +# Using Networks + +With the release of Docker 1.9 you can now manage custom networks. + + +Here you can see how to create a network named ```network1``` using the ```bridge``` driver + +```python +docker_client.create_network("network1", driver="bridge") +``` + +You can also create more advanced networks with custom IPAM configurations. 
For example, +setting the subnet to ```192.168.52.0/24``` and gateway to ```192.168.52.254``` + +```python + +ipam_config = docker.utils.create_ipam_config(subnet='192.168.52.0/24', gateway='192.168.52.254') + +docker_client.create_network("network1", driver="bridge", ipam=ipam_config) +``` diff --git a/testbed/docker__docker-py/docs/port-bindings.md b/testbed/docker__docker-py/docs/port-bindings.md new file mode 100644 index 0000000000000000000000000000000000000000..a9b973fd66ee9a8325967e415e9ea7eb9f6e595a --- /dev/null +++ b/testbed/docker__docker-py/docs/port-bindings.md @@ -0,0 +1,56 @@ +# Port bindings +Port bindings is done in two parts. Firstly, by providing a list of ports to +open inside the container in the `Client().create_container()` method. +Bindings are declared in the `host_config` parameter. + +```python +container_id = cli.create_container( + 'busybox', 'ls', ports=[1111, 2222], + host_config=cli.create_host_config(port_bindings={ + 1111: 4567, + 2222: None + }) +) +``` + + +You can limit the host address on which the port will be exposed like such: + +```python +cli.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)}) +``` + +Or without host port assignment: + +```python +cli.create_host_config(port_bindings={1111: ('127.0.0.1',)}) +``` + +If you wish to use UDP instead of TCP (default), you need to declare ports +as such in both the config and host config: + +```python +container_id = cli.create_container( + 'busybox', 'ls', ports=[(1111, 'udp'), 2222], + host_config=cli.create_host_config(port_bindings={ + '1111/udp': 4567, 2222: None + }) +) +``` + +If trying to bind several IPs to the same port, you may use the following syntax: +```python +cli.create_host_config(port_bindings={ + 1111: [ + ('192.168.0.100', 1234), + ('192.168.0.101', 1234) + ] +}) +``` + +Similarly for several container ports bound to a single host port: +```python +cli.create_host_config(port_bindings={ + 1111: [1234, 4567] +}) +``` diff --git 
a/testbed/docker__docker-py/docs/tls.md b/testbed/docker__docker-py/docs/tls.md new file mode 100644 index 0000000000000000000000000000000000000000..85a22ee35787ee9171b9c633e0714e5be6c62ac3 --- /dev/null +++ b/testbed/docker__docker-py/docs/tls.md @@ -0,0 +1,86 @@ +## Connection to daemon using HTTPS + +**Note:** *These instructions are docker-py specific. Please refer to +[http://docs.docker.com/articles/https/](http://docs.docker.com/articles/https/) +first.* + +## TLSConfig + +**Params**: + +* client_cert (tuple of str): Path to client cert, path to client key +* ca_cert (str): Path to CA cert file +* verify (bool or str): This can be `False` or a path to a CA Cert file +* ssl_version (int): A valid [SSL version]( +https://docs.python.org/3.4/library/ssl.html#ssl.PROTOCOL_TLSv1) +* assert_hostname (bool): Verify hostname of docker daemon + +### configure_client + +**Params**: + +* client: ([Client](api.md#client-api)): A client to apply this config to + + +## Authenticate server based on public/default CA pool + +```python +client = docker.Client(base_url='', tls=True) +``` + +Equivalent CLI options: +```bash +docker --tls ... +``` + +If you want to use TLS but don't want to verify the server certificate +(for example when testing with a self-signed certificate): + +```python +tls_config = docker.tls.TLSConfig(verify=False) +client = docker.Client(base_url='', tls=tls_config) +``` + +## Authenticate server based on given CA + +```python +tls_config = docker.tls.TLSConfig(ca_cert='/path/to/ca.pem') +client = docker.Client(base_url='', tls=tls_config) +``` + +Equivalent CLI options: +```bash +docker --tlsverify --tlscacert /path/to/ca.pem ... 
+``` + +## Authenticate with client certificate, do not authenticate server based on given CA + +```python +tls_config = docker.tls.TLSConfig( + client_cert=('/path/to/client-cert.pem', '/path/to/client-key.pem') +) +client = docker.Client(base_url='', tls=tls_config) +``` + +Equivalent CLI options: +```bash +docker --tls --tlscert /path/to/client-cert.pem --tlskey /path/to/client-key.pem ... +``` + +## Authenticate with client certificate, authenticate server based on given CA + +```python +tls_config = docker.tls.TLSConfig( + client_cert=('/path/to/client-cert.pem', '/path/to/client-key.pem'), + verify='/path/to/ca.pem' +) +client = docker.Client(base_url='', tls=tls_config) +``` + +Equivalent CLI options: +```bash +docker --tlsverify \ + --tlscert /path/to/client-cert.pem \ + --tlskey /path/to/client-key.pem \ + --tlscacert /path/to/ca.pem ... +``` diff --git a/testbed/docker__docker-py/docs/volumes.md b/testbed/docker__docker-py/docs/volumes.md new file mode 100644 index 0000000000000000000000000000000000000000..04273d805d6058bcd02565ba48357f0bce911457 --- /dev/null +++ b/testbed/docker__docker-py/docs/volumes.md @@ -0,0 +1,34 @@ +# Using volumes + +Volume declaration is done in two parts. Provide a list of mountpoints to +the `Client().create_container()` method, and declare mappings in the +`host_config` section. + +```python +container_id = cli.create_container( + 'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'], + host_config=cli.create_host_config(binds={ + '/home/user1/': { + 'bind': '/mnt/vol2', + 'mode': 'rw', + }, + '/var/www': { + 'bind': '/mnt/vol1', + 'mode': 'ro', + } + }) +) +``` + +You can alternatively specify binds as a list. 
This code is equivalent to the +example above: + +```python +container_id = cli.create_container( + 'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'], + host_config=cli.create_host_config(binds=[ + '/home/user1/:/mnt/vol2', + '/var/www:/mnt/vol1:ro', + ]) +) +``` diff --git a/testbed/docker__docker-py/mkdocs.yml b/testbed/docker__docker-py/mkdocs.yml new file mode 100644 index 0000000000000000000000000000000000000000..c6bfae1013976c530ff58917ad99d3e8ca797e7e --- /dev/null +++ b/testbed/docker__docker-py/mkdocs.yml @@ -0,0 +1,18 @@ +site_name: docker-py Documentation +site_description: An API client for Docker written in Python +site_favicon: favicon_whale.png +site_url: http://docker-py.readthedocs.org +repo_url: https://github.com/docker/docker-py/ +theme: readthedocs +pages: +- Home: index.md +- Client API: api.md +- Port Bindings: port-bindings.md +- Using Volumes: volumes.md +- Using TLS: tls.md +- Host devices: host-devices.md +- Host configuration: hostconfig.md +- Network configuration: networks.md +- Using with boot2docker: boot2docker.md +- Change Log: change_log.md +- Contributing: contributing.md diff --git a/testbed/docker__docker-py/pytest.ini b/testbed/docker__docker-py/pytest.ini new file mode 100644 index 0000000000000000000000000000000000000000..21b47a6aaac05bcf6403de34c6a797607af37781 --- /dev/null +++ b/testbed/docker__docker-py/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --tb=short -rxs diff --git a/testbed/docker__docker-py/requirements.txt b/testbed/docker__docker-py/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..72c255d3188ee1ca54915760be6f652ca728828f --- /dev/null +++ b/testbed/docker__docker-py/requirements.txt @@ -0,0 +1,3 @@ +requests==2.5.3 +six>=1.4.0 +websocket-client==0.32.0 diff --git a/testbed/docker__docker-py/setup.py b/testbed/docker__docker-py/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..6d8616331d6b384f3b75529744663d2369033c2f --- /dev/null +++ 
b/testbed/docker__docker-py/setup.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +import os +import sys +from setuptools import setup + +ROOT_DIR = os.path.dirname(__file__) +SOURCE_DIR = os.path.join(ROOT_DIR) + +requirements = [ + 'requests >= 2.5.2', + 'six >= 1.4.0', + 'websocket-client >= 0.32.0', +] + +exec(open('docker/version.py').read()) + +with open('./test-requirements.txt') as test_reqs_txt: + test_requirements = [line for line in test_reqs_txt] + + +setup( + name="docker-py", + version=version, + description="Python client for Docker.", + url='https://github.com/docker/docker-py/', + packages=[ + 'docker', 'docker.api', 'docker.auth', 'docker.unixconn', + 'docker.utils', 'docker.utils.ports', 'docker.ssladapter' + ], + install_requires=requirements, + tests_require=test_requirements, + zip_safe=False, + test_suite='tests', + classifiers=[ + 'Development Status :: 4 - Beta', + 'Environment :: Other Environment', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Topic :: Utilities', + 'License :: OSI Approved :: Apache Software License', + ], +) diff --git a/testbed/docker__docker-py/test-requirements.txt b/testbed/docker__docker-py/test-requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..be4998803fb0ff6f21a0f1fce2307e2887518fd6 --- /dev/null +++ b/testbed/docker__docker-py/test-requirements.txt @@ -0,0 +1,5 @@ +mock==1.0.1 +pytest==2.7.2 +coverage==3.7.1 +pytest-cov==2.1.0 +flake8==2.4.1 \ No newline at end of file diff --git a/testbed/docker__docker-py/tests/Dockerfile-dind-certs b/testbed/docker__docker-py/tests/Dockerfile-dind-certs new file mode 100644 index 0000000000000000000000000000000000000000..9e8c042b63d0002c0d71fec98732a2b0dc34a242 --- /dev/null +++ 
b/testbed/docker__docker-py/tests/Dockerfile-dind-certs @@ -0,0 +1,20 @@ +FROM python:2.7 +RUN mkdir /tmp/certs +VOLUME /certs + +WORKDIR /tmp/certs +RUN openssl genrsa -aes256 -passout pass:foobar -out ca-key.pem 4096 +RUN echo "[req]\nprompt=no\ndistinguished_name = req_distinguished_name\n[req_distinguished_name]\ncountryName=AU" > /tmp/config +RUN openssl req -new -x509 -passin pass:foobar -config /tmp/config -days 365 -key ca-key.pem -sha256 -out ca.pem +RUN openssl genrsa -out server-key.pem -passout pass:foobar 4096 +RUN openssl req -subj "/CN=docker" -sha256 -new -key server-key.pem -out server.csr +RUN echo subjectAltName = DNS:docker,DNS:localhost > extfile.cnf +RUN openssl x509 -req -days 365 -passin pass:foobar -sha256 -in server.csr -CA ca.pem -CAkey ca-key.pem -CAcreateserial -out server-cert.pem -extfile extfile.cnf +RUN openssl genrsa -out key.pem 4096 +RUN openssl req -passin pass:foobar -subj '/CN=client' -new -key key.pem -out client.csr +RUN echo extendedKeyUsage = clientAuth > extfile.cnf +RUN openssl x509 -req -passin pass:foobar -days 365 -sha256 -in client.csr -CA ca.pem -CAkey ca-key.pem -CAcreateserial -out cert.pem -extfile extfile.cnf +RUN chmod -v 0400 ca-key.pem key.pem server-key.pem +RUN chmod -v 0444 ca.pem server-cert.pem cert.pem + +CMD cp -R /tmp/certs/* /certs && while true; do sleep 1; done diff --git a/testbed/docker__docker-py/tests/__init__.py b/testbed/docker__docker-py/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/docker__docker-py/tests/base.py b/testbed/docker__docker-py/tests/base.py new file mode 100644 index 0000000000000000000000000000000000000000..a2c01fc2d8d7bc600a278877d41dfdc9d2bd6e01 --- /dev/null +++ b/testbed/docker__docker-py/tests/base.py @@ -0,0 +1,48 @@ +import sys +import unittest + +import pytest +import six + +import docker + + +class BaseTestCase(unittest.TestCase): + def assertIn(self, object, 
collection): + if six.PY2 and sys.version_info[1] <= 6: + return self.assertTrue(object in collection) + return super(BaseTestCase, self).assertIn(object, collection) + + +def requires_api_version(version): + return pytest.mark.skipif( + docker.utils.version_lt( + docker.constants.DEFAULT_DOCKER_API_VERSION, version + ), + reason="API version is too low (< {0})".format(version) + ) + + +class Cleanup(object): + if sys.version_info < (2, 7): + # Provide a basic implementation of addCleanup for Python < 2.7 + def __init__(self, *args, **kwargs): + super(Cleanup, self).__init__(*args, **kwargs) + self._cleanups = [] + + def tearDown(self): + super(Cleanup, self).tearDown() + ok = True + while self._cleanups: + fn, args, kwargs = self._cleanups.pop(-1) + try: + fn(*args, **kwargs) + except KeyboardInterrupt: + raise + except: + ok = False + if not ok: + raise + + def addCleanup(self, function, *args, **kwargs): + self._cleanups.append((function, args, kwargs)) diff --git a/testbed/docker__docker-py/tests/helpers.py b/testbed/docker__docker-py/tests/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..21036acead0bac53301c63cf01df7f286405fac0 --- /dev/null +++ b/testbed/docker__docker-py/tests/helpers.py @@ -0,0 +1,185 @@ +import errno +import os +import os.path +import select +import shutil +import struct +import tarfile +import tempfile +import unittest + +import docker +import six + +BUSYBOX = 'busybox:buildroot-2014.02' +EXEC_DRIVER = [] + + +def make_tree(dirs, files): + base = tempfile.mkdtemp() + + for path in dirs: + os.makedirs(os.path.join(base, path)) + + for path in files: + with open(os.path.join(base, path), 'w') as f: + f.write("content") + + return base + + +def simple_tar(path): + f = tempfile.NamedTemporaryFile() + t = tarfile.open(mode='w', fileobj=f) + + abs_path = os.path.abspath(path) + t.add(abs_path, arcname=os.path.basename(path), recursive=False) + + t.close() + f.seek(0) + return f + + +def untar_file(tardata, 
filename): + with tarfile.open(mode='r', fileobj=tardata) as t: + f = t.extractfile(filename) + result = f.read() + f.close() + return result + + +def exec_driver_is_native(): + global EXEC_DRIVER + if not EXEC_DRIVER: + c = docker_client() + EXEC_DRIVER = c.info()['ExecutionDriver'] + c.close() + return EXEC_DRIVER.startswith('native') + + +def docker_client(**kwargs): + return docker.Client(**docker_client_kwargs(**kwargs)) + + +def docker_client_kwargs(**kwargs): + client_kwargs = docker.utils.kwargs_from_env(assert_hostname=False) + client_kwargs.update(kwargs) + return client_kwargs + + +def read_socket(socket, n=4096): + """ Code stolen from dockerpty to read the socket """ + recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK) + + # wait for data to become available + select.select([socket], [], []) + + try: + if hasattr(socket, 'recv'): + return socket.recv(n) + return os.read(socket.fileno(), n) + except EnvironmentError as e: + if e.errno not in recoverable_errors: + raise + + +def next_packet_size(socket): + """ Code stolen from dockerpty to get the next packet size """ + data = six.binary_type() + while len(data) < 8: + next_data = read_socket(socket, 8 - len(data)) + if not next_data: + return 0 + data = data + next_data + + if data is None: + return 0 + + if len(data) == 8: + _, actual = struct.unpack('>BxxxL', data) + return actual + + +def read_data(socket, packet_size): + data = six.binary_type() + while len(data) < packet_size: + next_data = read_socket(socket, packet_size - len(data)) + if not next_data: + assert False, "Failed trying to read in the dataz" + data += next_data + return data + + +class BaseTestCase(unittest.TestCase): + tmp_imgs = [] + tmp_containers = [] + tmp_folders = [] + tmp_volumes = [] + + def setUp(self): + if six.PY2: + self.assertRegex = self.assertRegexpMatches + self.assertCountEqual = self.assertItemsEqual + self.client = docker_client(timeout=60) + self.tmp_imgs = [] + self.tmp_containers = [] + 
self.tmp_folders = [] + self.tmp_volumes = [] + self.tmp_networks = [] + + def tearDown(self): + for img in self.tmp_imgs: + try: + self.client.remove_image(img) + except docker.errors.APIError: + pass + for container in self.tmp_containers: + try: + self.client.stop(container, timeout=1) + self.client.remove_container(container) + except docker.errors.APIError: + pass + for network in self.tmp_networks: + try: + self.client.remove_network(network) + except docker.errors.APIError: + pass + for folder in self.tmp_folders: + shutil.rmtree(folder) + + for volume in self.tmp_volumes: + try: + self.client.remove_volume(volume) + except docker.errors.APIError: + pass + + self.client.close() + + def run_container(self, *args, **kwargs): + container = self.client.create_container(*args, **kwargs) + self.tmp_containers.append(container) + self.client.start(container) + exitcode = self.client.wait(container) + + if exitcode != 0: + output = self.client.logs(container) + raise Exception( + "Container exited with code {}:\n{}" + .format(exitcode, output)) + + return container + + def create_and_start(self, image='busybox', command='top', **kwargs): + container = self.client.create_container( + image=image, command=command, **kwargs) + self.tmp_containers.append(container) + self.client.start(container) + return container + + def execute(self, container, cmd, exit_code=0, **kwargs): + exc = self.client.exec_create(container, cmd, **kwargs) + output = self.client.exec_start(exc) + actual_exit_code = self.client.exec_inspect(exc)['ExitCode'] + msg = "Expected `{}` to exit with code {} but returned {}:\n{}".format( + " ".join(cmd), exit_code, actual_exit_code, output) + assert actual_exit_code == exit_code, msg diff --git a/testbed/docker__docker-py/tests/integration/__init__.py b/testbed/docker__docker-py/tests/integration/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
class InformationTest(helpers.BaseTestCase):
    """Smoke tests for the daemon information endpoints."""

    def test_version(self):
        res = self.client.version()
        self.assertIn('GoVersion', res)
        self.assertIn('Version', res)
        self.assertEqual(len(res['Version'].split('.')), 3)

    def test_info(self):
        res = self.client.info()
        self.assertIn('Containers', res)
        self.assertIn('Images', res)
        self.assertIn('Debug', res)

    def test_search(self):
        self.client = helpers.docker_client(timeout=10)
        res = self.client.search('busybox')
        self.assertTrue(len(res) >= 1)
        base_img = [x for x in res if x['name'] == 'busybox']
        self.assertEqual(len(base_img), 1)
        self.assertIn('description', base_img[0])


class LinkTest(helpers.BaseTestCase):
    def test_remove_link(self):
        """Removing a link leaves both linked containers in place."""
        # Create containers
        container1 = self.client.create_container(
            helpers.BUSYBOX, 'cat', detach=True, stdin_open=True
        )
        container1_id = container1['Id']
        self.tmp_containers.append(container1_id)
        self.client.start(container1_id)

        # Create Link
        # we don't want the first /
        link_path = self.client.inspect_container(container1_id)['Name'][1:]
        link_alias = 'mylink'

        container2 = self.client.create_container(
            helpers.BUSYBOX, 'cat', host_config=self.client.create_host_config(
                links={link_path: link_alias}, network_mode='none'
            )
        )
        container2_id = container2['Id']
        self.tmp_containers.append(container2_id)
        self.client.start(container2_id)

        # Remove link
        linked_name = self.client.inspect_container(container2_id)['Name'][1:]
        link_name = '%s/%s' % (linked_name, link_alias)
        self.client.remove_container(link_name, link=True)

        # Link is gone
        containers = self.client.containers(all=True)
        retrieved = [x for x in containers if link_name in x['Names']]
        self.assertEqual(len(retrieved), 0)

        # Containers are still there
        retrieved = [
            x for x in containers if x['Id'].startswith(container1_id) or
            x['Id'].startswith(container2_id)
        ]
        self.assertEqual(len(retrieved), 2)


class LoadConfigTest(helpers.BaseTestCase):
    def test_load_legacy_config(self):
        folder = tempfile.mkdtemp()
        self.tmp_folders.append(folder)
        cfg_path = os.path.join(folder, '.dockercfg')
        auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
        # Use a context manager so the handle is closed even on error
        # (the original used a bare open()/close() pair).
        with open(cfg_path, 'w') as f:
            f.write('auth = {0}\n'.format(auth_))
            f.write('email = sakuya@scarlet.net')
        cfg = docker.auth.load_config(cfg_path)
        self.assertNotEqual(cfg[docker.auth.INDEX_NAME], None)
        cfg = cfg[docker.auth.INDEX_NAME]
        self.assertEqual(cfg['username'], 'sakuya')
        self.assertEqual(cfg['password'], 'izayoi')
        self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
        self.assertEqual(cfg.get('Auth'), None)

    def test_load_json_config(self):
        folder = tempfile.mkdtemp()
        self.tmp_folders.append(folder)
        cfg_path = os.path.join(folder, '.dockercfg')
        auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
        email_ = 'sakuya@scarlet.net'
        # Reuse cfg_path instead of re-joining the same components.
        with open(cfg_path, 'w') as f:
            f.write('{{"{0}": {{"auth": "{1}", "email": "{2}"}}}}\n'.format(
                docker.auth.INDEX_URL, auth_, email_))
        cfg = docker.auth.load_config(cfg_path)
        self.assertNotEqual(cfg[docker.auth.INDEX_URL], None)
        cfg = cfg[docker.auth.INDEX_URL]
        self.assertEqual(cfg['username'], 'sakuya')
        self.assertEqual(cfg['password'], 'izayoi')
        self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
        self.assertEqual(cfg.get('Auth'), None)


class AutoDetectVersionTest(unittest.TestCase):
    """Verify version='auto' negotiates the daemon's API version."""

    def test_client_init(self):
        client = helpers.docker_client(version='auto')
        client_version = client._version
        api_version = client.version(api_version=False)['ApiVersion']
        self.assertEqual(client_version, api_version)
        api_version_2 = client.version()['ApiVersion']
        self.assertEqual(client_version, api_version_2)
        client.close()

    def test_auto_client(self):
        client = docker.AutoVersionClient(**helpers.docker_client_kwargs())
        client_version = client._version
        api_version = client.version(api_version=False)['ApiVersion']
        self.assertEqual(client_version, api_version)
        api_version_2 = client.version()['ApiVersion']
        self.assertEqual(client_version, api_version_2)
        client.close()
        with self.assertRaises(docker.errors.DockerException):
            docker.AutoVersionClient(
                **helpers.docker_client_kwargs(version='1.11')
            )


class ConnectionTimeoutTest(unittest.TestCase):
    def setUp(self):
        self.timeout = 0.5
        self.client = docker.client.Client(base_url='http://192.168.10.2:4243',
                                           timeout=self.timeout)

    def test_timeout(self):
        start = time.time()
        res = None
        # This call isn't supposed to complete, and it should fail fast.
        # Narrowed from a bare ``except:`` (which would also swallow
        # KeyboardInterrupt/SystemExit); we only care that the call fails.
        try:
            res = self.client.inspect_container('id')
        except Exception:
            pass
        end = time.time()
        self.assertTrue(res is None)
        self.assertTrue(end - start < 2 * self.timeout)


class UnixconnTest(unittest.TestCase):
    """
    Test UNIX socket connection adapter.
    """

    def test_resource_warnings(self):
        """
        Test no warnings are produced when using the client.
        """

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')

            client = helpers.docker_client()
            client.images()
            client.close()
            del client

        # The original message read "No warnings produced", i.e. it stated
        # the opposite of the failure it reports. (w[0] is only evaluated
        # when the assertion fails, so at least one warning exists then.)
        assert len(w) == 0, \
            "Expected no warnings, got: {0}".format(w[0].message)
'w') as f: + f.write("\n".join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'ADD . /test', + ])) + + with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: + f.write("\n".join([ + 'ignored', + 'Dockerfile', + '.dockerignore', + '!ignored/subdir/excepted-file', + '', # empty line + ])) + + with open(os.path.join(base_dir, 'not-ignored'), 'w') as f: + f.write("this file should not be ignored") + + subdir = os.path.join(base_dir, 'ignored', 'subdir') + os.makedirs(subdir) + with open(os.path.join(subdir, 'file'), 'w') as f: + f.write("this file should be ignored") + + with open(os.path.join(subdir, 'excepted-file'), 'w') as f: + f.write("this file should not be ignored") + + tag = 'docker-py-test-build-with-dockerignore' + stream = self.client.build( + path=base_dir, + tag=tag, + ) + for chunk in stream: + pass + + c = self.client.create_container(tag, ['find', '/test', '-type', 'f']) + self.client.start(c) + self.client.wait(c) + logs = self.client.logs(c) + + if six.PY3: + logs = logs.decode('utf-8') + + self.assertEqual( + sorted(list(filter(None, logs.split('\n')))), + sorted(['/test/ignored/subdir/excepted-file', + '/test/not-ignored']), + ) + + @requires_api_version('1.21') + def test_build_with_buildargs(self): + script = io.BytesIO('\n'.join([ + 'FROM scratch', + 'ARG test', + 'USER $test' + ]).encode('ascii')) + + stream = self.client.build( + fileobj=script, tag='buildargs', buildargs={'test': 'OK'} + ) + self.tmp_imgs.append('buildargs') + for chunk in stream: + pass + + info = self.client.inspect_image('buildargs') + self.assertEqual(info['Config']['User'], 'OK') + + def test_build_stderr_data(self): + control_chars = ['\x1b[91m', '\x1b[0m'] + snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' + script = io.BytesIO(b'\n'.join([ + b'FROM busybox', + 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') + ])) + + stream = self.client.build( + fileobj=script, stream=True, decode=True, nocache=True + ) + lines = [] + for 
chunk in stream: + lines.append(chunk.get('stream')) + expected = '{0}{2}\n{1}'.format( + control_chars[0], control_chars[1], snippet + ) + self.assertTrue(any([line == expected for line in lines])) diff --git a/testbed/docker__docker-py/tests/integration/conftest.py b/testbed/docker__docker-py/tests/integration/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..b17419504e6afdde42307e0ea4b36e70f0a9f04c --- /dev/null +++ b/testbed/docker__docker-py/tests/integration/conftest.py @@ -0,0 +1,31 @@ +from __future__ import print_function + +import json +import sys +import warnings + +import docker.errors +import pytest + +from ..helpers import BUSYBOX +from ..helpers import docker_client + + +@pytest.fixture(autouse=True, scope='session') +def setup_test_session(): + warnings.simplefilter('error') + c = docker_client() + try: + c.inspect_image(BUSYBOX) + except docker.errors.NotFound: + print("\npulling {0}".format(BUSYBOX), file=sys.stderr) + for data in c.pull(BUSYBOX, stream=True): + data = json.loads(data.decode('utf-8')) + status = data.get("status") + progress = data.get("progress") + detail = "{0} - {1}".format(status, progress) + print(detail, file=sys.stderr) + + # Double make sure we now have busybox + c.inspect_image(BUSYBOX) + c.close() diff --git a/testbed/docker__docker-py/tests/integration/container_test.py b/testbed/docker__docker-py/tests/integration/container_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a1498f937448de1cab4eec2cb387d4109e3144a1 --- /dev/null +++ b/testbed/docker__docker-py/tests/integration/container_test.py @@ -0,0 +1,1063 @@ +import os +import signal +import tempfile + +import docker +import pytest +import six + +from ..base import requires_api_version +from .. 
class ListContainersTest(helpers.BaseTestCase):
    """Tests for the container-listing endpoint."""

    def test_list_containers(self):
        res0 = self.client.containers(all=True)
        size = len(res0)
        res1 = self.client.create_container(BUSYBOX, 'true')
        self.assertIn('Id', res1)
        self.client.start(res1['Id'])
        self.tmp_containers.append(res1['Id'])
        res2 = self.client.containers(all=True)
        self.assertEqual(size + 1, len(res2))
        retrieved = [x for x in res2 if x['Id'].startswith(res1['Id'])]
        self.assertEqual(len(retrieved), 1)
        retrieved = retrieved[0]
        self.assertIn('Command', retrieved)
        self.assertEqual(retrieved['Command'], six.text_type('true'))
        self.assertIn('Image', retrieved)
        self.assertRegex(retrieved['Image'], r'busybox:.*')
        self.assertIn('Status', retrieved)


class CreateContainerTest(helpers.BaseTestCase):
    """Tests for container creation options (links, volumes, limits, ...)."""

    def test_create(self):
        res = self.client.create_container(BUSYBOX, 'true')
        self.assertIn('Id', res)
        self.tmp_containers.append(res['Id'])

    def test_create_with_host_pid_mode(self):
        ctnr = self.client.create_container(
            BUSYBOX, 'true', host_config=self.client.create_host_config(
                pid_mode='host', network_mode='none'
            )
        )
        self.assertIn('Id', ctnr)
        self.tmp_containers.append(ctnr['Id'])
        self.client.start(ctnr)
        inspect = self.client.inspect_container(ctnr)
        self.assertIn('HostConfig', inspect)
        host_config = inspect['HostConfig']
        self.assertIn('PidMode', host_config)
        self.assertEqual(host_config['PidMode'], 'host')

    def test_create_with_links(self):
        res0 = self.client.create_container(
            BUSYBOX, 'cat',
            detach=True, stdin_open=True,
            environment={'FOO': '1'})

        container1_id = res0['Id']
        self.tmp_containers.append(container1_id)

        self.client.start(container1_id)

        res1 = self.client.create_container(
            BUSYBOX, 'cat',
            detach=True, stdin_open=True,
            environment={'FOO': '1'})

        container2_id = res1['Id']
        self.tmp_containers.append(container2_id)

        self.client.start(container2_id)

        # we don't want the first /
        link_path1 = self.client.inspect_container(container1_id)['Name'][1:]
        link_alias1 = 'mylink1'
        link_env_prefix1 = link_alias1.upper()

        link_path2 = self.client.inspect_container(container2_id)['Name'][1:]
        link_alias2 = 'mylink2'
        link_env_prefix2 = link_alias2.upper()

        res2 = self.client.create_container(
            BUSYBOX, 'env', host_config=self.client.create_host_config(
                links={link_path1: link_alias1, link_path2: link_alias2},
                network_mode='bridge'
            )
        )
        container3_id = res2['Id']
        self.tmp_containers.append(container3_id)
        self.client.start(container3_id)
        self.assertEqual(self.client.wait(container3_id), 0)

        logs = self.client.logs(container3_id)
        if six.PY3:
            logs = logs.decode('utf-8')
        self.assertIn('{0}_NAME='.format(link_env_prefix1), logs)
        self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix1), logs)
        self.assertIn('{0}_NAME='.format(link_env_prefix2), logs)
        self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix2), logs)

    def test_create_with_restart_policy(self):
        container = self.client.create_container(
            BUSYBOX, ['sleep', '2'],
            host_config=self.client.create_host_config(
                restart_policy={"Name": "always", "MaximumRetryCount": 0},
                network_mode='none'
            )
        )
        # Renamed from ``id`` to avoid shadowing the builtin.
        container_id = container['Id']
        self.client.start(container_id)
        self.client.wait(container_id)
        with self.assertRaises(docker.errors.APIError) as exc:
            self.client.remove_container(container_id)
        err = exc.exception.response.text
        self.assertIn(
            'You cannot remove a running container', err
        )
        self.client.remove_container(container_id, force=True)

    def test_create_container_with_volumes_from(self):
        vol_names = ['foobar_vol0', 'foobar_vol1']

        res0 = self.client.create_container(
            BUSYBOX, 'true', name=vol_names[0]
        )
        container1_id = res0['Id']
        self.tmp_containers.append(container1_id)
        self.client.start(container1_id)

        res1 = self.client.create_container(
            BUSYBOX, 'true', name=vol_names[1]
        )
        container2_id = res1['Id']
        self.tmp_containers.append(container2_id)
        self.client.start(container2_id)
        # Passing volumes_from outside host_config must be rejected.
        with self.assertRaises(docker.errors.DockerException):
            self.client.create_container(
                BUSYBOX, 'cat', detach=True, stdin_open=True,
                volumes_from=vol_names
            )
        res2 = self.client.create_container(
            BUSYBOX, 'cat', detach=True, stdin_open=True,
            host_config=self.client.create_host_config(
                volumes_from=vol_names, network_mode='none'
            )
        )
        container3_id = res2['Id']
        self.tmp_containers.append(container3_id)
        self.client.start(container3_id)

        info = self.client.inspect_container(res2['Id'])
        self.assertCountEqual(info['HostConfig']['VolumesFrom'], vol_names)

    def create_container_readonly_fs(self):
        if not helpers.exec_driver_is_native():
            pytest.skip('Exec driver not native')

        ctnr = self.client.create_container(
            BUSYBOX, ['mkdir', '/shrine'],
            host_config=self.client.create_host_config(
                read_only=True, network_mode='none'
            )
        )
        self.assertIn('Id', ctnr)
        self.tmp_containers.append(ctnr['Id'])
        self.client.start(ctnr)
        res = self.client.wait(ctnr)
        self.assertNotEqual(res, 0)

    def create_container_with_name(self):
        res = self.client.create_container(BUSYBOX, 'true', name='foobar')
        self.assertIn('Id', res)
        self.tmp_containers.append(res['Id'])
        inspect = self.client.inspect_container(res['Id'])
        self.assertIn('Name', inspect)
        self.assertEqual('/foobar', inspect['Name'])

    def create_container_privileged(self):
        res = self.client.create_container(
            BUSYBOX, 'true', host_config=self.client.create_host_config(
                privileged=True, network_mode='none'
            )
        )
        self.assertIn('Id', res)
        self.tmp_containers.append(res['Id'])
        self.client.start(res['Id'])
        inspect = self.client.inspect_container(res['Id'])
        self.assertIn('Config', inspect)
        self.assertIn('Id', inspect)
        self.assertTrue(inspect['Id'].startswith(res['Id']))
        self.assertIn('Image', inspect)
        self.assertIn('State', inspect)
        self.assertIn('Running', inspect['State'])
        if not inspect['State']['Running']:
            self.assertIn('ExitCode', inspect['State'])
            self.assertEqual(inspect['State']['ExitCode'], 0)
        # Since Nov 2013, the Privileged flag is no longer part of the
        # container's config exposed via the API (safety concerns?).
        #
        if 'Privileged' in inspect['Config']:
            self.assertEqual(inspect['Config']['Privileged'], True)

    # BUG FIX: the three methods above were defined without the ``test_``
    # prefix, so the unittest runner silently skipped them.  The aliases
    # below make them run while keeping the original names callable.
    test_create_container_readonly_fs = create_container_readonly_fs
    test_create_container_with_name = create_container_with_name
    test_create_container_privileged = create_container_privileged

    def test_create_with_mac_address(self):
        mac_address_expected = "02:42:ac:11:00:0a"
        container = self.client.create_container(
            BUSYBOX, ['sleep', '60'], mac_address=mac_address_expected)

        container_id = container['Id']

        self.client.start(container)
        res = self.client.inspect_container(container['Id'])
        self.assertEqual(mac_address_expected,
                         res['NetworkSettings']['MacAddress'])

        self.client.kill(container_id)

    @requires_api_version('1.20')
    def test_group_id_ints(self):
        container = self.client.create_container(
            BUSYBOX, 'id -G',
            host_config=self.client.create_host_config(group_add=[1000, 1001])
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        self.client.wait(container)

        logs = self.client.logs(container)
        if six.PY3:
            logs = logs.decode('utf-8')
        groups = logs.strip().split(' ')
        self.assertIn('1000', groups)
        self.assertIn('1001', groups)

    @requires_api_version('1.20')
    def test_group_id_strings(self):
        container = self.client.create_container(
            BUSYBOX, 'id -G', host_config=self.client.create_host_config(
                group_add=['1000', '1001']
            )
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        self.client.wait(container)

        logs = self.client.logs(container)
        if six.PY3:
            logs = logs.decode('utf-8')

        groups = logs.strip().split(' ')
        self.assertIn('1000', groups)
        self.assertIn('1001', groups)

    def test_valid_log_driver_and_log_opt(self):
        log_config = docker.utils.LogConfig(
            type='json-file',
            config={'max-file': '100'}
        )

        container = self.client.create_container(
            BUSYBOX, ['true'],
            host_config=self.client.create_host_config(log_config=log_config)
        )
        self.tmp_containers.append(container['Id'])
        self.client.start(container)

        info = self.client.inspect_container(container)
        container_log_config = info['HostConfig']['LogConfig']

        self.assertEqual(container_log_config['Type'], log_config.type)
        self.assertEqual(container_log_config['Config'], log_config.config)

    def test_invalid_log_driver_raises_exception(self):
        log_config = docker.utils.LogConfig(
            type='asdf-nope',
            config={}
        )

        expected_msg = "logger: no log driver named 'asdf-nope' is registered"
        with pytest.raises(docker.errors.APIError) as excinfo:
            # raises an internal server error 500
            container = self.client.create_container(
                BUSYBOX, ['true'], host_config=self.client.create_host_config(
                    log_config=log_config
                )
            )
            self.client.start(container)

        assert expected_msg in str(excinfo.value)

    def test_valid_no_log_driver_specified(self):
        log_config = docker.utils.LogConfig(
            type="",
            config={'max-file': '100'}
        )

        container = self.client.create_container(
            BUSYBOX, ['true'],
            host_config=self.client.create_host_config(log_config=log_config)
        )
        self.tmp_containers.append(container['Id'])
        self.client.start(container)

        info = self.client.inspect_container(container)
        container_log_config = info['HostConfig']['LogConfig']

        self.assertEqual(container_log_config['Type'], "json-file")
        self.assertEqual(container_log_config['Config'], log_config.config)

    def test_valid_no_config_specified(self):
        log_config = docker.utils.LogConfig(
            type="json-file",
            config=None
        )

        container = self.client.create_container(
            BUSYBOX, ['true'],
            host_config=self.client.create_host_config(log_config=log_config)
        )
        self.tmp_containers.append(container['Id'])
        self.client.start(container)

        info = self.client.inspect_container(container)
        container_log_config = info['HostConfig']['LogConfig']

        self.assertEqual(container_log_config['Type'], "json-file")
        self.assertEqual(container_log_config['Config'], {})

    def test_create_with_memory_constraints_with_str(self):
        ctnr = self.client.create_container(
            BUSYBOX, 'true',
            host_config=self.client.create_host_config(
                memswap_limit='1G',
                mem_limit='700M'
            )
        )
        self.assertIn('Id', ctnr)
        self.tmp_containers.append(ctnr['Id'])
        self.client.start(ctnr)
        inspect = self.client.inspect_container(ctnr)

        self.assertIn('HostConfig', inspect)
        host_config = inspect['HostConfig']
        for limit in ['Memory', 'MemorySwap']:
            self.assertIn(limit, host_config)

    def test_create_with_memory_constraints_with_int(self):
        ctnr = self.client.create_container(
            BUSYBOX, 'true',
            host_config=self.client.create_host_config(mem_swappiness=40)
        )
        self.assertIn('Id', ctnr)
        self.tmp_containers.append(ctnr['Id'])
        self.client.start(ctnr)
        inspect = self.client.inspect_container(ctnr)

        self.assertIn('HostConfig', inspect)
        host_config = inspect['HostConfig']
        self.assertIn('MemorySwappiness', host_config)

    def test_create_host_config_exception_raising(self):
        self.assertRaises(TypeError,
                          self.client.create_host_config, mem_swappiness='40')

        self.assertRaises(ValueError,
                          self.client.create_host_config, pid_mode='40')

    def test_create_with_environment_variable_no_value(self):
        container = self.client.create_container(
            BUSYBOX,
            ['echo'],
            environment={'Foo': None, 'Other': 'one', 'Blank': ''},
        )
        self.tmp_containers.append(container['Id'])
        config = self.client.inspect_container(container['Id'])
        assert (
            sorted(config['Config']['Env']) ==
            sorted(['Foo', 'Other=one', 'Blank='])
        )
False, + BUSYBOX, + ['touch', os.path.join(self.mount_dest, self.filename)], + ) + + def test_create_with_binds_rw(self): + + container = self.run_with_volume( + False, + BUSYBOX, + ['ls', self.mount_dest], + ) + logs = self.client.logs(container) + + if six.PY3: + logs = logs.decode('utf-8') + self.assertIn(self.filename, logs) + inspect_data = self.client.inspect_container(container) + self.check_container_data(inspect_data, True) + + def test_create_with_binds_ro(self): + self.run_with_volume( + False, + BUSYBOX, + ['touch', os.path.join(self.mount_dest, self.filename)], + ) + container = self.run_with_volume( + True, + BUSYBOX, + ['ls', self.mount_dest], + ) + logs = self.client.logs(container) + + if six.PY3: + logs = logs.decode('utf-8') + self.assertIn(self.filename, logs) + + inspect_data = self.client.inspect_container(container) + self.check_container_data(inspect_data, False) + + def check_container_data(self, inspect_data, rw): + if docker.utils.compare_version('1.20', self.client._version) < 0: + self.assertIn('Volumes', inspect_data) + self.assertIn(self.mount_dest, inspect_data['Volumes']) + self.assertEqual( + self.mount_origin, inspect_data['Volumes'][self.mount_dest] + ) + self.assertIn(self.mount_dest, inspect_data['VolumesRW']) + self.assertFalse(inspect_data['VolumesRW'][self.mount_dest]) + else: + self.assertIn('Mounts', inspect_data) + filtered = list(filter( + lambda x: x['Destination'] == self.mount_dest, + inspect_data['Mounts'] + )) + self.assertEqual(len(filtered), 1) + mount_data = filtered[0] + self.assertEqual(mount_data['Source'], self.mount_origin) + self.assertEqual(mount_data['RW'], rw) + + def run_with_volume(self, ro, *args, **kwargs): + return self.run_container( + *args, + volumes={self.mount_dest: {}}, + host_config=self.client.create_host_config( + binds={ + self.mount_origin: { + 'bind': self.mount_dest, + 'ro': ro, + }, + }, + network_mode='none' + ), + **kwargs + ) + + +@requires_api_version('1.20') +class 
@requires_api_version('1.20')
class ArchiveTest(helpers.BaseTestCase):
    """Tests for get_archive / put_archive (file copy in and out)."""

    def test_get_file_archive_from_container(self):
        data = 'The Maid and the Pocket Watch of Blood'
        ctnr = self.client.create_container(
            BUSYBOX, 'sh -c "echo {0} > /vol1/data.txt"'.format(data),
            volumes=['/vol1']
        )
        self.tmp_containers.append(ctnr)
        self.client.start(ctnr)
        self.client.wait(ctnr)
        with tempfile.NamedTemporaryFile() as destination:
            strm, stat = self.client.get_archive(ctnr, '/vol1/data.txt')
            for d in strm:
                destination.write(d)
            destination.seek(0)
            retrieved_data = helpers.untar_file(destination, 'data.txt')
            if six.PY3:
                retrieved_data = retrieved_data.decode('utf-8')
            self.assertEqual(data, retrieved_data.strip())

    def test_get_file_stat_from_container(self):
        data = 'The Maid and the Pocket Watch of Blood'
        ctnr = self.client.create_container(
            BUSYBOX, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data),
            volumes=['/vol1']
        )
        self.tmp_containers.append(ctnr)
        self.client.start(ctnr)
        self.client.wait(ctnr)
        strm, stat = self.client.get_archive(ctnr, '/vol1/data.txt')
        self.assertIn('name', stat)
        self.assertEqual(stat['name'], 'data.txt')
        self.assertIn('size', stat)
        self.assertEqual(stat['size'], len(data))

    def test_copy_file_to_container(self):
        data = b'Deaf To All But The Song'
        with tempfile.NamedTemporaryFile() as test_file:
            test_file.write(data)
            test_file.seek(0)
            target = os.path.join(
                '/vol1', os.path.basename(test_file.name)
            )
            ctnr = self.client.create_container(
                BUSYBOX,
                'cat {0}'.format(target),
                volumes=['/vol1']
            )
            self.tmp_containers.append(ctnr)
            with helpers.simple_tar(test_file.name) as test_tar:
                self.client.put_archive(ctnr, '/vol1', test_tar)
        self.client.start(ctnr)
        self.client.wait(ctnr)
        logs = self.client.logs(ctnr)
        if six.PY3:
            logs = logs.decode('utf-8')
            data = data.decode('utf-8')
        self.assertEqual(logs.strip(), data)

    def test_copy_directory_to_container(self):
        files = ['a.py', 'b.py', 'foo/b.py']
        dirs = ['foo', 'bar']
        base = helpers.make_tree(dirs, files)
        ctnr = self.client.create_container(
            BUSYBOX, 'ls -p /vol1', volumes=['/vol1']
        )
        self.tmp_containers.append(ctnr)
        with docker.utils.tar(base) as test_tar:
            self.client.put_archive(ctnr, '/vol1', test_tar)
        self.client.start(ctnr)
        self.client.wait(ctnr)
        logs = self.client.logs(ctnr)
        if six.PY3:
            logs = logs.decode('utf-8')
        results = logs.strip().split()
        for expected in ('a.py', 'b.py', 'foo/', 'bar/'):
            self.assertIn(expected, results)


class RenameContainerTest(helpers.BaseTestCase):
    def test_rename_container(self):
        """Renaming updates the inspected Name field."""
        version = self.client.version()['Version']
        name = 'hong_meiling'
        res = self.client.create_container(BUSYBOX, 'true')
        self.assertIn('Id', res)
        self.tmp_containers.append(res['Id'])
        self.client.rename(res, name)
        inspect = self.client.inspect_container(res['Id'])
        self.assertIn('Name', inspect)
        if version == '1.5.0':
            self.assertEqual(name, inspect['Name'])
        else:
            self.assertEqual('/{0}'.format(name), inspect['Name'])
class StartContainerTest(helpers.BaseTestCase):
    """Tests for starting containers and inspecting their state."""

    def _assert_started_ok(self, res):
        # Shared inspection checks for a freshly started container.
        inspect = self.client.inspect_container(res['Id'])
        self.assertIn('Config', inspect)
        self.assertIn('Id', inspect)
        self.assertTrue(inspect['Id'].startswith(res['Id']))
        self.assertIn('Image', inspect)
        self.assertIn('State', inspect)
        self.assertIn('Running', inspect['State'])
        if not inspect['State']['Running']:
            self.assertIn('ExitCode', inspect['State'])
            self.assertEqual(inspect['State']['ExitCode'], 0)

    def test_start_container(self):
        res = self.client.create_container(BUSYBOX, 'true')
        self.assertIn('Id', res)
        self.tmp_containers.append(res['Id'])
        self.client.start(res['Id'])
        self._assert_started_ok(res)

    def test_start_container_with_dict_instead_of_id(self):
        res = self.client.create_container(BUSYBOX, 'true')
        self.assertIn('Id', res)
        self.tmp_containers.append(res['Id'])
        self.client.start(res)
        self._assert_started_ok(res)

    def test_run_shlex_commands(self):
        commands = [
            'true',
            'echo "The Young Descendant of Tepes & Septette for the '
            'Dead Princess"',
            'echo -n "The Young Descendant of Tepes & Septette for the '
            'Dead Princess"',
            '/bin/sh -c "echo Hello World"',
            '/bin/sh -c \'echo "Hello World"\'',
            'echo "\"Night of Nights\""',
            'true && echo "Night of Nights"'
        ]
        for cmd in commands:
            container = self.client.create_container(BUSYBOX, cmd)
            cid = container['Id']
            self.client.start(cid)
            self.tmp_containers.append(cid)
            exitcode = self.client.wait(cid)
            self.assertEqual(exitcode, 0, msg=cmd)


class WaitTest(helpers.BaseTestCase):
    """Tests for waiting on container exit."""

    def test_wait(self):
        res = self.client.create_container(BUSYBOX, ['sleep', '3'])
        cid = res['Id']
        self.tmp_containers.append(cid)
        self.client.start(cid)
        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)
        inspect = self.client.inspect_container(cid)
        self.assertIn('Running', inspect['State'])
        self.assertEqual(inspect['State']['Running'], False)
        self.assertIn('ExitCode', inspect['State'])
        self.assertEqual(inspect['State']['ExitCode'], exitcode)

    def test_wait_with_dict_instead_of_id(self):
        res = self.client.create_container(BUSYBOX, ['sleep', '3'])
        cid = res['Id']
        self.tmp_containers.append(cid)
        self.client.start(res)
        exitcode = self.client.wait(res)
        self.assertEqual(exitcode, 0)
        inspect = self.client.inspect_container(res)
        self.assertIn('Running', inspect['State'])
        self.assertEqual(inspect['State']['Running'], False)
        self.assertIn('ExitCode', inspect['State'])
        self.assertEqual(inspect['State']['ExitCode'], exitcode)


class LogsTest(helpers.BaseTestCase):
    """Tests for log retrieval (plain, streaming, tail)."""

    def test_logs(self):
        snippet = 'Flowering Nights (Sakuya Iyazoi)'
        container = self.client.create_container(
            BUSYBOX, 'echo {0}'.format(snippet)
        )
        cid = container['Id']
        self.tmp_containers.append(cid)
        self.client.start(cid)
        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)
        logs = self.client.logs(cid)
        self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii'))

    def test_logs_tail_option(self):
        snippet = '''Line1
Line2'''
        container = self.client.create_container(
            BUSYBOX, 'echo "{0}"'.format(snippet)
        )
        cid = container['Id']
        self.tmp_containers.append(cid)
        self.client.start(cid)
        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)
        logs = self.client.logs(cid, tail=1)
        self.assertEqual(logs, 'Line2\n'.encode(encoding='ascii'))

    def test_logs_streaming_and_follow(self):
        snippet = 'Flowering Nights (Sakuya Iyazoi)'
        container = self.client.create_container(
            BUSYBOX, 'echo {0}'.format(snippet)
        )
        cid = container['Id']
        self.tmp_containers.append(cid)
        self.client.start(cid)
        logs = b''
        for chunk in self.client.logs(cid, stream=True, follow=True):
            logs += chunk

        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)

        self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii'))

    def test_logs_with_dict_instead_of_id(self):
        snippet = 'Flowering Nights (Sakuya Iyazoi)'
        container = self.client.create_container(
            BUSYBOX, 'echo {0}'.format(snippet)
        )
        cid = container['Id']
        self.tmp_containers.append(cid)
        self.client.start(cid)
        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)
        logs = self.client.logs(container)
        self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii'))

    def test_logs_with_tail_0(self):
        snippet = 'Flowering Nights (Sakuya Iyazoi)'
        container = self.client.create_container(
            BUSYBOX, 'echo "{0}"'.format(snippet)
        )
        cid = container['Id']
        self.tmp_containers.append(cid)
        self.client.start(cid)
        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)
        logs = self.client.logs(cid, tail=0)
        self.assertEqual(logs, ''.encode(encoding='ascii'))


class DiffTest(helpers.BaseTestCase):
    """Tests for filesystem diff reporting."""

    def test_diff(self):
        container = self.client.create_container(BUSYBOX, ['touch', '/test'])
        cid = container['Id']
        self.client.start(cid)
        self.tmp_containers.append(cid)
        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)
        diff = self.client.diff(cid)
        test_diff = [x for x in diff if x.get('Path', None) == '/test']
        self.assertEqual(len(test_diff), 1)
        self.assertIn('Kind', test_diff[0])
        self.assertEqual(test_diff[0]['Kind'], 1)

    def test_diff_with_dict_instead_of_id(self):
        container = self.client.create_container(BUSYBOX, ['touch', '/test'])
        cid = container['Id']
        self.client.start(cid)
        self.tmp_containers.append(cid)
        exitcode = self.client.wait(cid)
        self.assertEqual(exitcode, 0)
        diff = self.client.diff(container)
        test_diff = [x for x in diff if x.get('Path', None) == '/test']
        self.assertEqual(len(test_diff), 1)
        self.assertIn('Kind', test_diff[0])
        self.assertEqual(test_diff[0]['Kind'], 1)
container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('Running', state) + self.assertEqual(state['Running'], False) + + +class KillTest(helpers.BaseTestCase): + def test_kill(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + self.client.kill(id) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + if helpers.exec_driver_is_native(): + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False) + + def test_kill_with_dict_instead_of_id(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + self.client.kill(container) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + if helpers.exec_driver_is_native(): + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False) + + def test_kill_with_signal(self): + container = self.client.create_container(BUSYBOX, ['sleep', '60']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + self.client.kill(id, signal=signal.SIGKILL) + exitcode = self.client.wait(id) + self.assertNotEqual(exitcode, 0) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False, state) + + +class PortTest(helpers.BaseTestCase): + def test_port(self): + + port_bindings = 
{ + '1111': ('127.0.0.1', '4567'), + '2222': ('127.0.0.1', '4568') + } + + container = self.client.create_container( + BUSYBOX, ['sleep', '60'], ports=list(port_bindings.keys()), + host_config=self.client.create_host_config( + port_bindings=port_bindings, network_mode='bridge' + ) + ) + id = container['Id'] + + self.client.start(container) + + # Call the port function on each biding and compare expected vs actual + for port in port_bindings: + actual_bindings = self.client.port(container, port) + port_binding = actual_bindings.pop() + + ip, host_port = port_binding['HostIp'], port_binding['HostPort'] + + self.assertEqual(ip, port_bindings[port][0]) + self.assertEqual(host_port, port_bindings[port][1]) + + self.client.kill(id) + + +class ContainerTopTest(helpers.BaseTestCase): + def test_top(self): + container = self.client.create_container( + BUSYBOX, ['sleep', '60']) + + id = container['Id'] + + self.client.start(container) + res = self.client.top(container['Id']) + self.assertEqual( + res['Titles'], + ['UID', 'PID', 'PPID', 'C', 'STIME', 'TTY', 'TIME', 'CMD'] + ) + self.assertEqual(len(res['Processes']), 1) + self.assertEqual(res['Processes'][0][7], 'sleep 60') + self.client.kill(id) + + def test_top_with_psargs(self): + container = self.client.create_container( + BUSYBOX, ['sleep', '60']) + + id = container['Id'] + + self.client.start(container) + res = self.client.top(container['Id'], 'waux') + self.assertEqual( + res['Titles'], + ['USER', 'PID', '%CPU', '%MEM', 'VSZ', 'RSS', + 'TTY', 'STAT', 'START', 'TIME', 'COMMAND'], + ) + self.assertEqual(len(res['Processes']), 1) + self.assertEqual(res['Processes'][0][10], 'sleep 60') + self.client.kill(id) + + +class RestartContainerTest(helpers.BaseTestCase): + def test_restart(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + info = self.client.inspect_container(id) + self.assertIn('State', info) + 
self.assertIn('StartedAt', info['State']) + start_time1 = info['State']['StartedAt'] + self.client.restart(id, timeout=2) + info2 = self.client.inspect_container(id) + self.assertIn('State', info2) + self.assertIn('StartedAt', info2['State']) + start_time2 = info2['State']['StartedAt'] + self.assertNotEqual(start_time1, start_time2) + self.assertIn('Running', info2['State']) + self.assertEqual(info2['State']['Running'], True) + self.client.kill(id) + + def test_restart_with_dict_instead_of_id(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + self.assertIn('Id', container) + id = container['Id'] + self.client.start(container) + self.tmp_containers.append(id) + info = self.client.inspect_container(id) + self.assertIn('State', info) + self.assertIn('StartedAt', info['State']) + start_time1 = info['State']['StartedAt'] + self.client.restart(container, timeout=2) + info2 = self.client.inspect_container(id) + self.assertIn('State', info2) + self.assertIn('StartedAt', info2['State']) + start_time2 = info2['State']['StartedAt'] + self.assertNotEqual(start_time1, start_time2) + self.assertIn('Running', info2['State']) + self.assertEqual(info2['State']['Running'], True) + self.client.kill(id) + + +class RemoveContainerTest(helpers.BaseTestCase): + def test_remove(self): + container = self.client.create_container(BUSYBOX, ['true']) + id = container['Id'] + self.client.start(id) + self.client.wait(id) + self.client.remove_container(id) + containers = self.client.containers(all=True) + res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)] + self.assertEqual(len(res), 0) + + def test_remove_with_dict_instead_of_id(self): + container = self.client.create_container(BUSYBOX, ['true']) + id = container['Id'] + self.client.start(id) + self.client.wait(id) + self.client.remove_container(container) + containers = self.client.containers(all=True) + res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)] + 
self.assertEqual(len(res), 0) + + +class AttachContainerTest(helpers.BaseTestCase): + def test_run_container_streaming(self): + container = self.client.create_container(BUSYBOX, '/bin/sh', + detach=True, stdin_open=True) + id = container['Id'] + self.tmp_containers.append(id) + self.client.start(id) + sock = self.client.attach_socket(container, ws=False) + self.assertTrue(sock.fileno() > -1) + + def test_run_container_reading_socket(self): + line = 'hi there and stuff and things, words!' + # `echo` appends CRLF, `printf` doesn't + command = "printf '{0}'".format(line) + container = self.client.create_container(BUSYBOX, command, + detach=True, tty=False) + ident = container['Id'] + self.tmp_containers.append(ident) + + opts = {"stdout": 1, "stream": 1, "logs": 1} + pty_stdout = self.client.attach_socket(ident, opts) + self.addCleanup(pty_stdout.close) + + self.client.start(ident) + + next_size = helpers.next_packet_size(pty_stdout) + self.assertEqual(next_size, len(line)) + data = helpers.read_data(pty_stdout, next_size) + self.assertEqual(data.decode('utf-8'), line) + + +class PauseTest(helpers.BaseTestCase): + def test_pause_unpause(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.tmp_containers.append(id) + self.client.start(container) + self.client.pause(id) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + self.assertEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], True) + self.assertIn('Paused', state) + self.assertEqual(state['Paused'], True) + + self.client.unpause(id) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + self.assertEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], 
True) + self.assertIn('Paused', state) + self.assertEqual(state['Paused'], False) + + +class GetContainerStatsTest(helpers.BaseTestCase): + @requires_api_version('1.19') + def test_get_container_stats_no_stream(self): + container = self.client.create_container( + BUSYBOX, ['sleep', '60'], + ) + self.tmp_containers.append(container) + self.client.start(container) + response = self.client.stats(container, stream=0) + self.client.kill(container) + + self.assertEqual(type(response), dict) + for key in ['read', 'networks', 'precpu_stats', 'cpu_stats', + 'memory_stats', 'blkio_stats']: + self.assertIn(key, response) + + @requires_api_version('1.17') + def test_get_container_stats_stream(self): + container = self.client.create_container( + BUSYBOX, ['sleep', '60'], + ) + self.tmp_containers.append(container) + self.client.start(container) + stream = self.client.stats(container) + for chunk in stream: + self.assertEqual(type(chunk), dict) + for key in ['read', 'network', 'precpu_stats', 'cpu_stats', + 'memory_stats', 'blkio_stats']: + self.assertIn(key, chunk) + + +class ContainerUpdateTest(helpers.BaseTestCase): + @requires_api_version('1.22') + def test_update_container(self): + old_mem_limit = 400 * 1024 * 1024 + new_mem_limit = 300 * 1024 * 1024 + container = self.client.create_container( + BUSYBOX, 'top', host_config=self.client.create_host_config( + mem_limit=old_mem_limit + ), cpu_shares=102 + ) + self.tmp_containers.append(container) + self.client.start(container) + self.client.update_container(container, mem_limit=new_mem_limit) + inspect_data = self.client.inspect_container(container) + self.assertEqual(inspect_data['HostConfig']['Memory'], new_mem_limit) + self.assertEqual(inspect_data['HostConfig']['CpuShares'], 102) diff --git a/testbed/docker__docker-py/tests/integration/exec_test.py b/testbed/docker__docker-py/tests/integration/exec_test.py new file mode 100644 index 0000000000000000000000000000000000000000..9f5480808bcc72ca6152b7a9ac604aa7ff512cd4 --- 
/dev/null +++ b/testbed/docker__docker-py/tests/integration/exec_test.py @@ -0,0 +1,130 @@ +import pytest + +from .. import helpers + +BUSYBOX = helpers.BUSYBOX + + +class ExecTest(helpers.BaseTestCase): + def test_execute_command(self): + if not helpers.exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, ['echo', 'hello']) + self.assertIn('Id', res) + + exec_log = self.client.exec_start(res) + self.assertEqual(exec_log, b'hello\n') + + def test_exec_command_string(self): + if not helpers.exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, 'echo hello world') + self.assertIn('Id', res) + + exec_log = self.client.exec_start(res) + self.assertEqual(exec_log, b'hello world\n') + + def test_exec_command_as_user(self): + if not helpers.exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, 'whoami', user='default') + self.assertIn('Id', res) + + exec_log = self.client.exec_start(res) + self.assertEqual(exec_log, b'default\n') + + def test_exec_command_as_root(self): + if not helpers.exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, 'whoami') + self.assertIn('Id', res) + + exec_log = 
self.client.exec_start(res) + self.assertEqual(exec_log, b'root\n') + + def test_exec_command_streaming(self): + if not helpers.exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.tmp_containers.append(id) + self.client.start(id) + + exec_id = self.client.exec_create(id, ['echo', 'hello\nworld']) + self.assertIn('Id', exec_id) + + res = b'' + for chunk in self.client.exec_start(exec_id, stream=True): + res += chunk + self.assertEqual(res, b'hello\nworld\n') + + def test_exec_start_socket(self): + if not helpers.exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + container_id = container['Id'] + self.client.start(container_id) + self.tmp_containers.append(container_id) + + line = 'yay, interactive exec!' + # `echo` appends CRLF, `printf` doesn't + exec_id = self.client.exec_create( + container_id, ['printf', line], tty=True) + self.assertIn('Id', exec_id) + + socket = self.client.exec_start(exec_id, socket=True) + self.addCleanup(socket.close) + + next_size = helpers.next_packet_size(socket) + self.assertEqual(next_size, len(line)) + data = helpers.read_data(socket, next_size) + self.assertEqual(data.decode('utf-8'), line) + + def test_exec_inspect(self): + if not helpers.exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + exec_id = self.client.exec_create(id, ['mkdir', '/does/not/exist']) + self.assertIn('Id', exec_id) + self.client.exec_start(exec_id) + exec_info = self.client.exec_inspect(exec_id) + self.assertIn('ExitCode', exec_info) + self.assertNotEqual(exec_info['ExitCode'], 0) diff --git 
a/testbed/docker__docker-py/tests/integration/image_test.py b/testbed/docker__docker-py/tests/integration/image_test.py new file mode 100644 index 0000000000000000000000000000000000000000..825f632722a3e046969e8175312434d65d0dfc47 --- /dev/null +++ b/testbed/docker__docker-py/tests/integration/image_test.py @@ -0,0 +1,235 @@ +import contextlib +import json +import shutil +import socket +import tarfile +import tempfile +import threading + +import pytest +import six +from six.moves import BaseHTTPServer +from six.moves import socketserver + + +import docker + +from .. import helpers + +BUSYBOX = helpers.BUSYBOX + + +class ListImagesTest(helpers.BaseTestCase): + def test_images(self): + res1 = self.client.images(all=True) + self.assertIn('Id', res1[0]) + res10 = res1[0] + self.assertIn('Created', res10) + self.assertIn('RepoTags', res10) + distinct = [] + for img in res1: + if img['Id'] not in distinct: + distinct.append(img['Id']) + self.assertEqual(len(distinct), self.client.info()['Images']) + + def test_images_quiet(self): + res1 = self.client.images(quiet=True) + self.assertEqual(type(res1[0]), six.text_type) + + +class PullImageTest(helpers.BaseTestCase): + def test_pull(self): + try: + self.client.remove_image('hello-world') + except docker.errors.APIError: + pass + res = self.client.pull('hello-world') + self.tmp_imgs.append('hello-world') + self.assertEqual(type(res), six.text_type) + self.assertGreaterEqual( + len(self.client.images('hello-world')), 1 + ) + img_info = self.client.inspect_image('hello-world') + self.assertIn('Id', img_info) + + def test_pull_streaming(self): + try: + self.client.remove_image('hello-world') + except docker.errors.APIError: + pass + stream = self.client.pull('hello-world', stream=True) + self.tmp_imgs.append('hello-world') + for chunk in stream: + if six.PY3: + chunk = chunk.decode('utf-8') + json.loads(chunk) # ensure chunk is a single, valid JSON blob + self.assertGreaterEqual( + len(self.client.images('hello-world')), 1 + ) + 
img_info = self.client.inspect_image('hello-world') + self.assertIn('Id', img_info) + + +class CommitTest(helpers.BaseTestCase): + def test_commit(self): + container = self.client.create_container(BUSYBOX, ['touch', '/test']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + res = self.client.commit(id) + self.assertIn('Id', res) + img_id = res['Id'] + self.tmp_imgs.append(img_id) + img = self.client.inspect_image(img_id) + self.assertIn('Container', img) + self.assertTrue(img['Container'].startswith(id)) + self.assertIn('ContainerConfig', img) + self.assertIn('Image', img['ContainerConfig']) + self.assertEqual(BUSYBOX, img['ContainerConfig']['Image']) + busybox_id = self.client.inspect_image(BUSYBOX)['Id'] + self.assertIn('Parent', img) + self.assertEqual(img['Parent'], busybox_id) + + +class RemoveImageTest(helpers.BaseTestCase): + def test_remove(self): + container = self.client.create_container(BUSYBOX, ['touch', '/test']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + res = self.client.commit(id) + self.assertIn('Id', res) + img_id = res['Id'] + self.tmp_imgs.append(img_id) + self.client.remove_image(img_id, force=True) + images = self.client.images(all=True) + res = [x for x in images if x['Id'].startswith(img_id)] + self.assertEqual(len(res), 0) + + +class ImportImageTest(helpers.BaseTestCase): + '''Base class for `docker import` test cases.''' + + TAR_SIZE = 512 * 1024 + + def write_dummy_tar_content(self, n_bytes, tar_fd): + def extend_file(f, n_bytes): + f.seek(n_bytes - 1) + f.write(bytearray([65])) + f.seek(0) + + tar = tarfile.TarFile(fileobj=tar_fd, mode='w') + + with tempfile.NamedTemporaryFile() as f: + extend_file(f, n_bytes) + tarinfo = tar.gettarinfo(name=f.name, arcname='testdata') + tar.addfile(tarinfo, fileobj=f) + + tar.close() + + @contextlib.contextmanager + def dummy_tar_stream(self, n_bytes): + '''Yields a stream that is valid tar data of size n_bytes.''' + with 
tempfile.NamedTemporaryFile() as tar_file: + self.write_dummy_tar_content(n_bytes, tar_file) + tar_file.seek(0) + yield tar_file + + @contextlib.contextmanager + def dummy_tar_file(self, n_bytes): + '''Yields the name of a valid tar file of size n_bytes.''' + with tempfile.NamedTemporaryFile() as tar_file: + self.write_dummy_tar_content(n_bytes, tar_file) + tar_file.seek(0) + yield tar_file.name + + def test_import_from_bytes(self): + with self.dummy_tar_stream(n_bytes=500) as f: + content = f.read() + + # The generic import_image() function cannot import in-memory bytes + # data that happens to be represented as a string type, because + # import_image() will try to use it as a filename and usually then + # trigger an exception. So we test the import_image_from_data() + # function instead. + statuses = self.client.import_image_from_data( + content, repository='test/import-from-bytes') + + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + self.assertNotIn('error', result) + + img_id = result['status'] + self.tmp_imgs.append(img_id) + + def test_import_from_file(self): + with self.dummy_tar_file(n_bytes=self.TAR_SIZE) as tar_filename: + # statuses = self.client.import_image( + # src=tar_filename, repository='test/import-from-file') + statuses = self.client.import_image_from_file( + tar_filename, repository='test/import-from-file') + + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + self.assertNotIn('error', result) + + self.assertIn('status', result) + img_id = result['status'] + self.tmp_imgs.append(img_id) + + def test_import_from_stream(self): + with self.dummy_tar_stream(n_bytes=self.TAR_SIZE) as tar_stream: + statuses = self.client.import_image( + src=tar_stream, repository='test/import-from-stream') + # statuses = self.client.import_image_from_stream( + # tar_stream, repository='test/import-from-stream') + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + 
self.assertNotIn('error', result) + + self.assertIn('status', result) + img_id = result['status'] + self.tmp_imgs.append(img_id) + + @contextlib.contextmanager + def temporary_http_file_server(self, stream): + '''Serve data from an IO stream over HTTP.''' + + class Handler(BaseHTTPServer.BaseHTTPRequestHandler): + def do_GET(self): + self.send_response(200) + self.send_header('Content-Type', 'application/x-tar') + self.end_headers() + shutil.copyfileobj(stream, self.wfile) + + server = socketserver.TCPServer(('', 0), Handler) + thread = threading.Thread(target=server.serve_forever) + thread.setDaemon(True) + thread.start() + + yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1]) + + server.shutdown() + + @pytest.mark.skipif(True, reason="Doesn't work inside a container - FIXME") + def test_import_from_url(self): + # The crappy test HTTP server doesn't handle large files well, so use + # a small file. + tar_size = 10240 + + with self.dummy_tar_stream(n_bytes=tar_size) as tar_data: + with self.temporary_http_file_server(tar_data) as url: + statuses = self.client.import_image( + src=url, repository='test/import-from-url') + + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + self.assertNotIn('error', result) + + self.assertIn('status', result) + img_id = result['status'] + self.tmp_imgs.append(img_id) diff --git a/testbed/docker__docker-py/tests/integration/network_test.py b/testbed/docker__docker-py/tests/integration/network_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a7446172dcd8a74be0ca0a4ed27cacd2bf23e626 --- /dev/null +++ b/testbed/docker__docker-py/tests/integration/network_test.py @@ -0,0 +1,285 @@ +import random + +import docker +from docker.utils import create_ipam_config +from docker.utils import create_ipam_pool +import pytest + +from .. 
import helpers +from ..base import requires_api_version + + +class TestNetworks(helpers.BaseTestCase): + def create_network(self, *args, **kwargs): + net_name = u'dockerpy{}'.format(random.getrandbits(24))[:14] + net_id = self.client.create_network(net_name, *args, **kwargs)['Id'] + self.tmp_networks.append(net_id) + return (net_name, net_id) + + @requires_api_version('1.21') + def test_list_networks(self): + networks = self.client.networks() + initial_size = len(networks) + + net_name, net_id = self.create_network() + + networks = self.client.networks() + self.assertEqual(len(networks), initial_size + 1) + self.assertTrue(net_id in [n['Id'] for n in networks]) + + networks_by_name = self.client.networks(names=[net_name]) + self.assertEqual([n['Id'] for n in networks_by_name], [net_id]) + + networks_by_partial_id = self.client.networks(ids=[net_id[:8]]) + self.assertEqual([n['Id'] for n in networks_by_partial_id], [net_id]) + + @requires_api_version('1.21') + def test_inspect_network(self): + net_name, net_id = self.create_network() + + net = self.client.inspect_network(net_id) + self.assertEqual(net['Id'], net_id) + self.assertEqual(net['Name'], net_name) + self.assertEqual(net['Driver'], 'bridge') + self.assertEqual(net['Scope'], 'local') + self.assertEqual(net['IPAM']['Driver'], 'default') + + @requires_api_version('1.21') + def test_create_network_with_ipam_config(self): + _, net_id = self.create_network( + ipam=create_ipam_config( + driver='default', + pool_configs=[ + create_ipam_pool( + subnet="172.28.0.0/16", + iprange="172.28.5.0/24", + gateway="172.28.5.254", + aux_addresses={ + "a": "172.28.1.5", + "b": "172.28.1.6", + "c": "172.28.1.7", + }, + ), + ], + ), + ) + + net = self.client.inspect_network(net_id) + ipam = net['IPAM'] + + assert ipam.pop('Options', None) is None + + assert ipam == { + 'Driver': 'default', + 'Config': [{ + 'Subnet': "172.28.0.0/16", + 'IPRange': "172.28.5.0/24", + 'Gateway': "172.28.5.254", + 'AuxiliaryAddresses': { + "a": 
"172.28.1.5", + "b": "172.28.1.6", + "c": "172.28.1.7", + }, + }], + } + + @requires_api_version('1.21') + def test_create_network_with_host_driver_fails(self): + net_name = 'dockerpy{}'.format(random.getrandbits(24))[:14] + + with pytest.raises(docker.errors.APIError): + self.client.create_network(net_name, driver='host') + + @requires_api_version('1.21') + def test_remove_network(self): + initial_size = len(self.client.networks()) + + net_name, net_id = self.create_network() + self.assertEqual(len(self.client.networks()), initial_size + 1) + + self.client.remove_network(net_id) + self.assertEqual(len(self.client.networks()), initial_size) + + @requires_api_version('1.21') + def test_connect_and_disconnect_container(self): + net_name, net_id = self.create_network() + + container = self.client.create_container('busybox', 'top') + self.tmp_containers.append(container) + self.client.start(container) + + network_data = self.client.inspect_network(net_id) + self.assertFalse(network_data.get('Containers')) + + self.client.connect_container_to_network(container, net_id) + network_data = self.client.inspect_network(net_id) + self.assertEqual( + list(network_data['Containers'].keys()), + [container['Id']]) + + with pytest.raises(docker.errors.APIError): + self.client.connect_container_to_network(container, net_id) + + self.client.disconnect_container_from_network(container, net_id) + network_data = self.client.inspect_network(net_id) + self.assertFalse(network_data.get('Containers')) + + with pytest.raises(docker.errors.APIError): + self.client.disconnect_container_from_network(container, net_id) + + @requires_api_version('1.22') + def test_connect_with_aliases(self): + net_name, net_id = self.create_network() + + container = self.client.create_container('busybox', 'top') + self.tmp_containers.append(container) + self.client.start(container) + + self.client.connect_container_to_network( + container, net_id, aliases=['foo', 'bar']) + container_data = 
self.client.inspect_container(container) + self.assertEqual( + container_data['NetworkSettings']['Networks'][net_name]['Aliases'], + ['foo', 'bar']) + + @requires_api_version('1.21') + def test_connect_on_container_create(self): + net_name, net_id = self.create_network() + + container = self.client.create_container( + image='busybox', + command='top', + host_config=self.client.create_host_config(network_mode=net_name), + ) + self.tmp_containers.append(container) + self.client.start(container) + + network_data = self.client.inspect_network(net_id) + self.assertEqual( + list(network_data['Containers'].keys()), + [container['Id']]) + + self.client.disconnect_container_from_network(container, net_id) + network_data = self.client.inspect_network(net_id) + self.assertFalse(network_data.get('Containers')) + + @requires_api_version('1.22') + def test_create_with_aliases(self): + net_name, net_id = self.create_network() + + container = self.client.create_container( + image='busybox', + command='top', + host_config=self.client.create_host_config( + network_mode=net_name, + ), + networking_config=self.client.create_networking_config({ + net_name: self.client.create_endpoint_config( + aliases=['foo', 'bar'], + ), + }), + ) + self.tmp_containers.append(container) + self.client.start(container) + + container_data = self.client.inspect_container(container) + self.assertEqual( + container_data['NetworkSettings']['Networks'][net_name]['Aliases'], + ['foo', 'bar']) + + @requires_api_version('1.22') + def test_create_with_links(self): + net_name, net_id = self.create_network() + + container = self.create_and_start( + host_config=self.client.create_host_config(network_mode=net_name), + networking_config=self.client.create_networking_config({ + net_name: self.client.create_endpoint_config( + links=[('docker-py-test-upstream', 'bar')], + ), + }), + ) + + container_data = self.client.inspect_container(container) + self.assertEqual( + 
container_data['NetworkSettings']['Networks'][net_name]['Links'], + ['docker-py-test-upstream:bar']) + + self.create_and_start( + name='docker-py-test-upstream', + host_config=self.client.create_host_config(network_mode=net_name), + ) + + self.execute(container, ['nslookup', 'bar']) + + @requires_api_version('1.22') + def test_connect_with_links(self): + net_name, net_id = self.create_network() + + container = self.create_and_start( + host_config=self.client.create_host_config(network_mode=net_name)) + + self.client.disconnect_container_from_network(container, net_name) + self.client.connect_container_to_network( + container, net_name, + links=[('docker-py-test-upstream', 'bar')]) + + container_data = self.client.inspect_container(container) + self.assertEqual( + container_data['NetworkSettings']['Networks'][net_name]['Links'], + ['docker-py-test-upstream:bar']) + + self.create_and_start( + name='docker-py-test-upstream', + host_config=self.client.create_host_config(network_mode=net_name), + ) + + self.execute(container, ['nslookup', 'bar']) + + @requires_api_version('1.22') + def test_connect_with_ipv4_address(self): + net_name, net_id = self.create_network() + + container = self.create_and_start( + host_config=self.client.create_host_config(network_mode=net_name)) + + self.client.disconnect_container_from_network(container, net_name) + self.client.connect_container_to_network( + container, net_name, + ipv4_address='192.168.0.1') + + container_data = self.client.inspect_container(container) + self.assertEqual( + container_data['NetworkSettings']['Networks'][net_name] + ['IPAMConfig']['IPv4Address'], + '192.168.0.1') + + self.create_and_start( + name='docker-py-test-upstream', + host_config=self.client.create_host_config(network_mode=net_name)) + + self.execute(container, ['nslookup', 'bar']) + + @requires_api_version('1.22') + def test_connect_with_ipv6_address(self): + net_name, net_id = self.create_network() + + container = self.create_and_start( + 
host_config=self.client.create_host_config(network_mode=net_name)) + + self.client.disconnect_container_from_network(container, net_name) + self.client.connect_container_to_network( + container, net_name, + ipv6_address='2001:389::1') + + container_data = self.client.inspect_container(container) + self.assertEqual( + container_data['NetworkSettings']['Networks'][net_name] + ['IPAMConfig']['IPv6Address'], + '2001:389::1') + + self.create_and_start( + name='docker-py-test-upstream', + host_config=self.client.create_host_config(network_mode=net_name)) + + self.execute(container, ['nslookup', 'bar']) diff --git a/testbed/docker__docker-py/tests/integration/regression_test.py b/testbed/docker__docker-py/tests/integration/regression_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8b321cf5d79ee937ff99ae2fdb7081349e140b41 --- /dev/null +++ b/testbed/docker__docker-py/tests/integration/regression_test.py @@ -0,0 +1,69 @@ +import io +import random + +import docker +import six + +from .. 
import helpers + +BUSYBOX = helpers.BUSYBOX + + +class TestRegressions(helpers.BaseTestCase): + def test_443_handle_nonchunked_response_in_stream(self): + dfile = io.BytesIO() + with self.assertRaises(docker.errors.APIError) as exc: + for line in self.client.build(fileobj=dfile, tag="a/b/c"): + pass + self.assertEqual(exc.exception.response.status_code, 500) + dfile.close() + + def test_542_truncate_ids_client_side(self): + self.client.start( + self.client.create_container(BUSYBOX, ['true']) + ) + result = self.client.containers(all=True, trunc=True) + self.assertEqual(len(result[0]['Id']), 12) + + def test_647_support_doubleslash_in_image_names(self): + with self.assertRaises(docker.errors.APIError): + self.client.inspect_image('gensokyo.jp//kirisame') + + def test_649_handle_timeout_value_none(self): + self.client.timeout = None + ctnr = self.client.create_container(BUSYBOX, ['sleep', '2']) + self.client.start(ctnr) + self.client.stop(ctnr) + + def test_715_handle_user_param_as_int_value(self): + ctnr = self.client.create_container(BUSYBOX, ['id', '-u'], user=1000) + self.client.start(ctnr) + self.client.wait(ctnr) + logs = self.client.logs(ctnr) + if six.PY3: + logs = logs.decode('utf-8') + assert logs == '1000\n' + + def test_792_explicit_port_protocol(self): + + tcp_port, udp_port = random.sample(range(9999, 32000), 2) + ctnr = self.client.create_container( + BUSYBOX, ['sleep', '9999'], ports=[2000, (2000, 'udp')], + host_config=self.client.create_host_config( + port_bindings={'2000/tcp': tcp_port, '2000/udp': udp_port} + ) + ) + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + self.assertEqual( + self.client.port(ctnr, 2000)[0]['HostPort'], + six.text_type(tcp_port) + ) + self.assertEqual( + self.client.port(ctnr, '2000/tcp')[0]['HostPort'], + six.text_type(tcp_port) + ) + self.assertEqual( + self.client.port(ctnr, '2000/udp')[0]['HostPort'], + six.text_type(udp_port) + ) diff --git a/testbed/docker__docker-py/tests/integration/volume_test.py 
b/testbed/docker__docker-py/tests/integration/volume_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8fa2dab53fc21435c0fc902f17bfde02358f78d8 --- /dev/null +++ b/testbed/docker__docker-py/tests/integration/volume_test.py @@ -0,0 +1,55 @@ +import docker +import pytest + +from .. import helpers +from ..base import requires_api_version + + +@requires_api_version('1.21') +class TestVolumes(helpers.BaseTestCase): + def test_create_volume(self): + name = 'perfectcherryblossom' + self.tmp_volumes.append(name) + result = self.client.create_volume(name) + self.assertIn('Name', result) + self.assertEqual(result['Name'], name) + self.assertIn('Driver', result) + self.assertEqual(result['Driver'], 'local') + + def test_create_volume_invalid_driver(self): + driver_name = 'invalid.driver' + + with pytest.raises(docker.errors.NotFound): + self.client.create_volume('perfectcherryblossom', driver_name) + + def test_list_volumes(self): + name = 'imperishablenight' + self.tmp_volumes.append(name) + volume_info = self.client.create_volume(name) + result = self.client.volumes() + self.assertIn('Volumes', result) + volumes = result['Volumes'] + self.assertIn(volume_info, volumes) + + def test_inspect_volume(self): + name = 'embodimentofscarletdevil' + self.tmp_volumes.append(name) + volume_info = self.client.create_volume(name) + result = self.client.inspect_volume(name) + self.assertEqual(volume_info, result) + + def test_inspect_nonexistent_volume(self): + name = 'embodimentofscarletdevil' + with pytest.raises(docker.errors.NotFound): + self.client.inspect_volume(name) + + def test_remove_volume(self): + name = 'shootthebullet' + self.tmp_volumes.append(name) + self.client.create_volume(name) + self.client.remove_volume(name) + + def test_remove_nonexistent_volume(self): + name = 'shootthebullet' + with pytest.raises(docker.errors.NotFound): + self.client.remove_volume(name) diff --git a/testbed/docker__docker-py/tests/unit/__init__.py 
b/testbed/docker__docker-py/tests/unit/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/docker__docker-py/tests/unit/api_test.py b/testbed/docker__docker-py/tests/unit/api_test.py new file mode 100644 index 0000000000000000000000000000000000000000..23fd1913467b9b0d940b93618cc719933486ce9b --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/api_test.py @@ -0,0 +1,417 @@ +# Copyright 2013 dotCloud inc. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import json +import os +import re +import shutil +import socket +import sys +import tempfile +import threading +import time + +import docker +import requests +import six + +from .. import base +from . 
import fake_api + +import pytest + +try: + from unittest import mock +except ImportError: + import mock + + +DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS + + +def response(status_code=200, content='', headers=None, reason=None, elapsed=0, + request=None): + res = requests.Response() + res.status_code = status_code + if not isinstance(content, six.binary_type): + content = json.dumps(content).encode('ascii') + res._content = content + res.headers = requests.structures.CaseInsensitiveDict(headers or {}) + res.reason = reason + res.elapsed = datetime.timedelta(elapsed) + res.request = request + return res + + +def fake_resolve_authconfig(authconfig, registry=None): + return None + + +def fake_inspect_container(self, container, tty=False): + return fake_api.get_fake_inspect_container(tty=tty)[1] + + +def fake_resp(method, url, *args, **kwargs): + key = None + if url in fake_api.fake_responses: + key = url + elif (url, method) in fake_api.fake_responses: + key = (url, method) + if not key: + raise Exception('{0} {1}'.format(method, url)) + status_code, content = fake_api.fake_responses[key]() + return response(status_code=status_code, content=content) + + +fake_request = mock.Mock(side_effect=fake_resp) + + +def fake_get(self, url, *args, **kwargs): + return fake_request('GET', url, *args, **kwargs) + + +def fake_post(self, url, *args, **kwargs): + return fake_request('POST', url, *args, **kwargs) + + +def fake_put(self, url, *args, **kwargs): + return fake_request('PUT', url, *args, **kwargs) + + +def fake_delete(self, url, *args, **kwargs): + return fake_request('DELETE', url, *args, **kwargs) + +url_base = 'http+docker://localunixsocket/' +url_prefix = '{0}v{1}/'.format( + url_base, + docker.constants.DEFAULT_DOCKER_API_VERSION) + + +class DockerClientTest(base.Cleanup, base.BaseTestCase): + def setUp(self): + self.patcher = mock.patch.multiple( + 'docker.Client', get=fake_get, post=fake_post, put=fake_put, + delete=fake_delete + ) + 
self.patcher.start() + self.client = docker.Client() + # Force-clear authconfig to avoid tampering with the tests + self.client._cfg = {'Configs': {}} + + def tearDown(self): + self.client.close() + self.patcher.stop() + + def assertIn(self, object, collection): + if six.PY2 and sys.version_info[1] <= 6: + return self.assertTrue(object in collection) + return super(DockerClientTest, self).assertIn(object, collection) + + def base_create_payload(self, img='busybox', cmd=None): + if not cmd: + cmd = ['true'] + return {"Tty": False, "Image": img, "Cmd": cmd, + "AttachStdin": False, + "AttachStderr": True, "AttachStdout": True, + "StdinOnce": False, + "OpenStdin": False, "NetworkDisabled": False, + } + + +class DockerApiTest(DockerClientTest): + def test_ctor(self): + with pytest.raises(docker.errors.DockerException) as excinfo: + docker.Client(version=1.12) + + self.assertEqual( + str(excinfo.value), + 'Version parameter must be a string or None. Found float' + ) + + def test_url_valid_resource(self): + url = self.client._url('/hello/{0}/world', 'somename') + self.assertEqual( + url, '{0}{1}'.format(url_prefix, 'hello/somename/world') + ) + + url = self.client._url( + '/hello/{0}/world/{1}', 'somename', 'someothername' + ) + self.assertEqual( + url, + '{0}{1}'.format(url_prefix, 'hello/somename/world/someothername') + ) + + url = self.client._url('/hello/{0}/world', '/some?name') + self.assertEqual( + url, '{0}{1}'.format(url_prefix, 'hello/%2Fsome%3Fname/world') + ) + + def test_url_invalid_resource(self): + with pytest.raises(ValueError): + self.client._url('/hello/{0}/world', ['sakuya', 'izayoi']) + + def test_url_no_resource(self): + url = self.client._url('/simple') + self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple')) + + def test_url_unversioned_api(self): + url = self.client._url( + '/hello/{0}/world', 'somename', versioned_api=False + ) + self.assertEqual( + url, '{0}{1}'.format(url_base, 'hello/somename/world') + ) + + def test_version(self): + 
self.client.version() + + fake_request.assert_called_with( + 'GET', + url_prefix + 'version', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_version_no_api_version(self): + self.client.version(False) + + fake_request.assert_called_with( + 'GET', + url_base + 'version', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_retrieve_server_version(self): + client = docker.Client(version="auto") + self.assertTrue(isinstance(client._version, six.string_types)) + self.assertFalse(client._version == "auto") + client.close() + + def test_auto_retrieve_server_version(self): + version = self.client._retrieve_server_version() + self.assertTrue(isinstance(version, six.string_types)) + + def test_info(self): + self.client.info() + + fake_request.assert_called_with( + 'GET', + url_prefix + 'info', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_search(self): + self.client.search('busybox') + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/search', + params={'term': 'busybox'}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_events(self): + self.client.events() + + fake_request.assert_called_with( + 'GET', + url_prefix + 'events', + params={'since': None, 'until': None, 'filters': None}, + stream=True + ) + + def test_events_with_since_until(self): + ts = 1356048000 + now = datetime.datetime.utcfromtimestamp(ts) + since = now - datetime.timedelta(seconds=10) + until = now + datetime.timedelta(seconds=10) + + self.client.events(since=since, until=until) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'events', + params={ + 'since': ts - 10, + 'until': ts + 10, + 'filters': None + }, + stream=True + ) + + def test_events_with_filters(self): + filters = {'event': ['die', 'stop'], + 'container': fake_api.FAKE_CONTAINER_ID} + + self.client.events(filters=filters) + + expected_filters = docker.utils.convert_filters(filters) + fake_request.assert_called_with( + 'GET', + url_prefix + 'events', + params={ + 'since': None, + 'until': None, + 
'filters': expected_filters + }, + stream=True + ) + + def _socket_path_for_client_session(self, client): + socket_adapter = client.get_adapter('http+docker://') + return socket_adapter.socket_path + + def test_url_compatibility_unix(self): + c = docker.Client(base_url="unix://socket") + + assert self._socket_path_for_client_session(c) == '/socket' + + def test_url_compatibility_unix_triple_slash(self): + c = docker.Client(base_url="unix:///socket") + + assert self._socket_path_for_client_session(c) == '/socket' + + def test_url_compatibility_http_unix_triple_slash(self): + c = docker.Client(base_url="http+unix:///socket") + + assert self._socket_path_for_client_session(c) == '/socket' + + def test_url_compatibility_http(self): + c = docker.Client(base_url="http://hostname:1234") + + assert c.base_url == "http://hostname:1234" + + def test_url_compatibility_tcp(self): + c = docker.Client(base_url="tcp://hostname:1234") + + assert c.base_url == "http://hostname:1234" + + def test_remove_link(self): + self.client.remove_container(fake_api.FAKE_CONTAINER_ID, link=True) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'containers/3cc2351ab11b', + params={'v': False, 'link': True, 'force': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_host_config_secopt(self): + security_opt = ['apparmor:test_profile'] + result = self.client.create_host_config(security_opt=security_opt) + self.assertIn('SecurityOpt', result) + self.assertEqual(result['SecurityOpt'], security_opt) + self.assertRaises( + TypeError, self.client.create_host_config, security_opt='wrong' + ) + + +class StreamTest(base.Cleanup, base.BaseTestCase): + def setUp(self): + socket_dir = tempfile.mkdtemp() + self.build_context = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, socket_dir) + self.addCleanup(shutil.rmtree, self.build_context) + self.socket_file = os.path.join(socket_dir, 'test_sock.sock') + self.server_socket = self._setup_socket() + self.stop_server = False + 
server_thread = threading.Thread(target=self.run_server) + server_thread.setDaemon(True) + server_thread.start() + self.response = None + self.request_handler = None + self.addCleanup(server_thread.join) + self.addCleanup(self.stop) + + def stop(self): + self.stop_server = True + + def _setup_socket(self): + server_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + server_sock.bind(self.socket_file) + # Non-blocking mode so that we can shut the test down easily + server_sock.setblocking(0) + server_sock.listen(5) + return server_sock + + def run_server(self): + try: + while not self.stop_server: + try: + connection, client_address = self.server_socket.accept() + except socket.error: + # Probably no connection to accept yet + time.sleep(0.01) + continue + + connection.setblocking(1) + try: + self.request_handler(connection) + finally: + connection.close() + finally: + self.server_socket.close() + + def early_response_sending_handler(self, connection): + data = b'' + headers = None + + connection.sendall(self.response) + while not headers: + data += connection.recv(2048) + parts = data.split(b'\r\n\r\n', 1) + if len(parts) == 2: + headers, data = parts + + mo = re.search(r'Content-Length: ([0-9]+)', headers.decode()) + assert mo + content_length = int(mo.group(1)) + + while True: + if len(data) >= content_length: + break + + data += connection.recv(2048) + + def test_early_stream_response(self): + self.request_handler = self.early_response_sending_handler + lines = [] + for i in range(0, 50): + line = str(i).encode() + lines += [('%x' % len(line)).encode(), line] + lines.append(b'0') + lines.append(b'') + + self.response = ( + b'HTTP/1.1 200 OK\r\n' + b'Transfer-Encoding: chunked\r\n' + b'\r\n' + ) + b'\r\n'.join(lines) + + with docker.Client(base_url="http+unix://" + self.socket_file) \ + as client: + for i in range(5): + try: + stream = client.build( + path=self.build_context, + stream=True + ) + break + except requests.ConnectionError as e: + if i == 4: + 
raise e + + self.assertEqual(list(stream), [ + str(i).encode() for i in range(50)]) diff --git a/testbed/docker__docker-py/tests/unit/auth_test.py b/testbed/docker__docker-py/tests/unit/auth_test.py new file mode 100644 index 0000000000000000000000000000000000000000..921aae007187f384024093aa61c2538d9137290d --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/auth_test.py @@ -0,0 +1,464 @@ +# -*- coding: utf-8 -*- + +import base64 +import json +import os +import os.path +import random +import shutil +import tempfile + +from docker import auth +from docker.auth.auth import parse_auth +from docker import errors + +from .. import base + +try: + from unittest import mock +except ImportError: + import mock + + +class RegressionTest(base.BaseTestCase): + def test_803_urlsafe_encode(self): + auth_data = { + 'username': 'root', + 'password': 'GR?XGR?XGR?XGR?X' + } + encoded = auth.encode_header(auth_data) + assert b'/' not in encoded + assert b'_' in encoded + + +class ResolveRepositoryNameTest(base.BaseTestCase): + def test_resolve_repository_name_hub_library_image(self): + self.assertEqual( + auth.resolve_repository_name('image'), + ('docker.io', 'image'), + ) + + def test_resolve_repository_name_dotted_hub_library_image(self): + self.assertEqual( + auth.resolve_repository_name('image.valid'), + ('docker.io', 'image.valid') + ) + + def test_resolve_repository_name_hub_image(self): + self.assertEqual( + auth.resolve_repository_name('username/image'), + ('docker.io', 'username/image'), + ) + + def test_explicit_hub_index_library_image(self): + self.assertEqual( + auth.resolve_repository_name('docker.io/image'), + ('docker.io', 'image') + ) + + def test_explicit_legacy_hub_index_library_image(self): + self.assertEqual( + auth.resolve_repository_name('index.docker.io/image'), + ('docker.io', 'image') + ) + + def test_resolve_repository_name_private_registry(self): + self.assertEqual( + auth.resolve_repository_name('my.registry.net/image'), + ('my.registry.net', 'image'), 
+ ) + + def test_resolve_repository_name_private_registry_with_port(self): + self.assertEqual( + auth.resolve_repository_name('my.registry.net:5000/image'), + ('my.registry.net:5000', 'image'), + ) + + def test_resolve_repository_name_private_registry_with_username(self): + self.assertEqual( + auth.resolve_repository_name('my.registry.net/username/image'), + ('my.registry.net', 'username/image'), + ) + + def test_resolve_repository_name_no_dots_but_port(self): + self.assertEqual( + auth.resolve_repository_name('hostname:5000/image'), + ('hostname:5000', 'image'), + ) + + def test_resolve_repository_name_no_dots_but_port_and_username(self): + self.assertEqual( + auth.resolve_repository_name('hostname:5000/username/image'), + ('hostname:5000', 'username/image'), + ) + + def test_resolve_repository_name_localhost(self): + self.assertEqual( + auth.resolve_repository_name('localhost/image'), + ('localhost', 'image'), + ) + + def test_resolve_repository_name_localhost_with_username(self): + self.assertEqual( + auth.resolve_repository_name('localhost/username/image'), + ('localhost', 'username/image'), + ) + + def test_invalid_index_name(self): + self.assertRaises( + errors.InvalidRepository, + lambda: auth.resolve_repository_name('-gecko.com/image') + ) + + +def encode_auth(auth_info): + return base64.b64encode( + auth_info.get('username', '').encode('utf-8') + b':' + + auth_info.get('password', '').encode('utf-8')) + + +class ResolveAuthTest(base.BaseTestCase): + index_config = {'auth': encode_auth({'username': 'indexuser'})} + private_config = {'auth': encode_auth({'username': 'privateuser'})} + legacy_config = {'auth': encode_auth({'username': 'legacyauth'})} + + auth_config = parse_auth({ + 'https://index.docker.io/v1/': index_config, + 'my.registry.net': private_config, + 'http://legacy.registry.url/v1/': legacy_config, + }) + + def test_resolve_authconfig_hostname_only(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'my.registry.net' + 
)['username'], + 'privateuser' + ) + + def test_resolve_authconfig_no_protocol(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'my.registry.net/v1/' + )['username'], + 'privateuser' + ) + + def test_resolve_authconfig_no_path(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'http://my.registry.net' + )['username'], + 'privateuser' + ) + + def test_resolve_authconfig_no_path_trailing_slash(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'http://my.registry.net/' + )['username'], + 'privateuser' + ) + + def test_resolve_authconfig_no_path_wrong_secure_proto(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'https://my.registry.net' + )['username'], + 'privateuser' + ) + + def test_resolve_authconfig_no_path_wrong_insecure_proto(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'http://index.docker.io' + )['username'], + 'indexuser' + ) + + def test_resolve_authconfig_path_wrong_proto(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'https://my.registry.net/v1/' + )['username'], + 'privateuser' + ) + + def test_resolve_authconfig_default_registry(self): + self.assertEqual( + auth.resolve_authconfig(self.auth_config)['username'], + 'indexuser' + ) + + def test_resolve_authconfig_default_explicit_none(self): + self.assertEqual( + auth.resolve_authconfig(self.auth_config, None)['username'], + 'indexuser' + ) + + def test_resolve_authconfig_fully_explicit(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'http://my.registry.net/v1/' + )['username'], + 'privateuser' + ) + + def test_resolve_authconfig_legacy_config(self): + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, 'legacy.registry.url' + )['username'], + 'legacyauth' + ) + + def test_resolve_authconfig_no_match(self): + self.assertTrue( + auth.resolve_authconfig(self.auth_config, 'does.not.exist') is None + ) + + def 
test_resolve_registry_and_auth_library_image(self): + image = 'image' + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, auth.resolve_repository_name(image)[0] + )['username'], + 'indexuser', + ) + + def test_resolve_registry_and_auth_hub_image(self): + image = 'username/image' + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, auth.resolve_repository_name(image)[0] + )['username'], + 'indexuser', + ) + + def test_resolve_registry_and_auth_explicit_hub(self): + image = 'docker.io/username/image' + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, auth.resolve_repository_name(image)[0] + )['username'], + 'indexuser', + ) + + def test_resolve_registry_and_auth_explicit_legacy_hub(self): + image = 'index.docker.io/username/image' + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, auth.resolve_repository_name(image)[0] + )['username'], + 'indexuser', + ) + + def test_resolve_registry_and_auth_private_registry(self): + image = 'my.registry.net/image' + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, auth.resolve_repository_name(image)[0] + )['username'], + 'privateuser', + ) + + def test_resolve_registry_and_auth_unauthenticated_registry(self): + image = 'other.registry.net/image' + self.assertEqual( + auth.resolve_authconfig( + self.auth_config, auth.resolve_repository_name(image)[0] + ), + None, + ) + + +class LoadConfigTest(base.Cleanup, base.BaseTestCase): + def test_load_config_no_file(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + cfg = auth.load_config(folder) + self.assertTrue(cfg is not None) + + def test_load_config(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + dockercfg_path = os.path.join(folder, '.dockercfg') + with open(dockercfg_path, 'w') as f: + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + f.write('auth = {0}\n'.format(auth_)) + f.write('email = sakuya@scarlet.net') + cfg = 
auth.load_config(dockercfg_path) + assert auth.INDEX_NAME in cfg + self.assertNotEqual(cfg[auth.INDEX_NAME], None) + cfg = cfg[auth.INDEX_NAME] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('auth'), None) + + def test_load_config_with_random_name(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + + dockercfg_path = os.path.join(folder, + '.{0}.dockercfg'.format( + random.randrange(100000))) + registry = 'https://your.private.registry.io' + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + config = { + registry: { + 'auth': '{0}'.format(auth_), + 'email': 'sakuya@scarlet.net' + } + } + + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + cfg = auth.load_config(dockercfg_path) + assert registry in cfg + self.assertNotEqual(cfg[registry], None) + cfg = cfg[registry] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('auth'), None) + + def test_load_config_custom_config_env(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + + dockercfg_path = os.path.join(folder, 'config.json') + registry = 'https://your.private.registry.io' + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + config = { + registry: { + 'auth': '{0}'.format(auth_), + 'email': 'sakuya@scarlet.net' + } + } + + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}): + cfg = auth.load_config(None) + assert registry in cfg + self.assertNotEqual(cfg[registry], None) + cfg = cfg[registry] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('auth'), None) + + def 
test_load_config_custom_config_env_with_auths(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + + dockercfg_path = os.path.join(folder, 'config.json') + registry = 'https://your.private.registry.io' + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + config = { + 'auths': { + registry: { + 'auth': '{0}'.format(auth_), + 'email': 'sakuya@scarlet.net' + } + } + } + + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}): + cfg = auth.load_config(None) + assert registry in cfg + self.assertNotEqual(cfg[registry], None) + cfg = cfg[registry] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('auth'), None) + + def test_load_config_custom_config_env_utf8(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + + dockercfg_path = os.path.join(folder, 'config.json') + registry = 'https://your.private.registry.io' + auth_ = base64.b64encode( + b'sakuya\xc3\xa6:izayoi\xc3\xa6').decode('ascii') + config = { + 'auths': { + registry: { + 'auth': '{0}'.format(auth_), + 'email': 'sakuya@scarlet.net' + } + } + } + + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}): + cfg = auth.load_config(None) + assert registry in cfg + self.assertNotEqual(cfg[registry], None) + cfg = cfg[registry] + self.assertEqual(cfg['username'], b'sakuya\xc3\xa6'.decode('utf8')) + self.assertEqual(cfg['password'], b'izayoi\xc3\xa6'.decode('utf8')) + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('auth'), None) + + def test_load_config_custom_config_env_with_headers(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + + dockercfg_path = os.path.join(folder, 'config.json') + config = { + 'HttpHeaders': { + 'Name': 'Spike', 
+ 'Surname': 'Spiegel' + }, + } + + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}): + cfg = auth.load_config(None) + assert 'HttpHeaders' in cfg + self.assertNotEqual(cfg['HttpHeaders'], None) + cfg = cfg['HttpHeaders'] + + self.assertEqual(cfg['Name'], 'Spike') + self.assertEqual(cfg['Surname'], 'Spiegel') + + def test_load_config_unknown_keys(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + dockercfg_path = os.path.join(folder, 'config.json') + config = { + 'detachKeys': 'ctrl-q, ctrl-u, ctrl-i' + } + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + cfg = auth.load_config(dockercfg_path) + assert cfg == {} + + def test_load_config_invalid_auth_dict(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + dockercfg_path = os.path.join(folder, 'config.json') + config = { + 'auths': { + 'scarlet.net': {'sakuya': 'izayoi'} + } + } + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + self.assertRaises( + errors.InvalidConfigFile, auth.load_config, dockercfg_path + ) diff --git a/testbed/docker__docker-py/tests/unit/build_test.py b/testbed/docker__docker-py/tests/unit/build_test.py new file mode 100644 index 0000000000000000000000000000000000000000..414153ed5294dad31a6e4d3077ca33abfd988931 --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/build_test.py @@ -0,0 +1,105 @@ +import gzip +import io + +import docker + +from .api_test import DockerClientTest + + +class BuildTest(DockerClientTest): + def test_build_container(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + + self.client.build(fileobj=script) + + def test_build_container_pull(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER 
docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + + self.client.build(fileobj=script, pull=True) + + def test_build_container_stream(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + + self.client.build(fileobj=script, stream=True) + + def test_build_container_custom_context(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + context = docker.utils.mkbuildcontext(script) + + self.client.build(fileobj=context, custom_context=True) + + def test_build_container_custom_context_gzip(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + context = docker.utils.mkbuildcontext(script) + gz_context = gzip.GzipFile(fileobj=context) + + self.client.build( + fileobj=gz_context, + custom_context=True, + encoding="gzip" + ) + + def test_build_remote_with_registry_auth(self): + self.client._auth_configs = { + 'https://example.com': { + 'user': 'example', + 'password': 'example', + 'email': 'example@example.com' + } + } + + self.client.build(path='https://github.com/docker-library/mongo') + + def test_build_container_with_named_dockerfile(self): + self.client.build('.', dockerfile='nameddockerfile') + + def test_build_container_with_container_limits(self): + self.client.build('.', container_limits={ + 'memory': 1024 * 1024, + 'cpusetcpus': 1, + 'cpushares': 1000, + 'memswap': 1024 * 
1024 * 8 + }) + + def test_build_container_invalid_container_limits(self): + self.assertRaises( + docker.errors.DockerException, + lambda: self.client.build('.', container_limits={ + 'foo': 'bar' + }) + ) diff --git a/testbed/docker__docker-py/tests/unit/container_test.py b/testbed/docker__docker-py/tests/unit/container_test.py new file mode 100644 index 0000000000000000000000000000000000000000..6e23f8921029236c69c273440832b027dd32e0ab --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/container_test.py @@ -0,0 +1,1472 @@ +import datetime +import json +import signal + +import docker +import pytest +import six + +from . import fake_api +from ..base import requires_api_version +from .api_test import ( + DockerClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS, + fake_inspect_container +) + +try: + from unittest import mock +except ImportError: + import mock + + +def fake_inspect_container_tty(self, container): + return fake_inspect_container(self, container, tty=True) + + +class StartContainerTest(DockerClientTest): + def test_start_container(self): + self.client.start(fake_api.FAKE_CONTAINER_ID) + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'containers/3cc2351ab11b/start' + ) + self.assertEqual(json.loads(args[1]['data']), {}) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_start_container_none(self): + with pytest.raises(ValueError) as excinfo: + self.client.start(container=None) + + self.assertEqual( + str(excinfo.value), + 'image or container param is undefined', + ) + + with pytest.raises(ValueError) as excinfo: + self.client.start(None) + + self.assertEqual( + str(excinfo.value), + 'image or container param is undefined', + ) + + def test_start_container_regression_573(self): + self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID}) + + def test_start_container_with_lxc_conf(self): + 
def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, + lxc_conf={'lxc.conf.k': 'lxc.conf.value'} + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_lxc_conf_compat(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, + lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_binds_ro(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, binds={ + '/tmp': { + "bind": '/mnt', + "ro": True + } + } + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_binds_rw(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, binds={ + '/tmp': {"bind": '/mnt', "ro": False} + } + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_port_binds(self): + self.maxDiff = None + + def call_start(): + self.client.start(fake_api.FAKE_CONTAINER_ID, port_bindings={ + 1111: None, + 2222: 2222, + '3333/udp': (3333,), + 4444: ('127.0.0.1',), + 5555: ('127.0.0.1', 5555), + 6666: [('127.0.0.1',), ('192.168.0.1',)] + }) + + pytest.deprecated_call(call_start) + + def test_start_container_with_links(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, links={'path': 'alias'} + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_multiple_links(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, + links={ + 'path1': 'alias1', + 'path2': 'alias2' + } + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_links_as_list_of_tuples(self): + def call_start(): + self.client.start(fake_api.FAKE_CONTAINER_ID, + links=[('path', 'alias')]) + + pytest.deprecated_call(call_start) + + def test_start_container_privileged(self): + def call_start(): + self.client.start(fake_api.FAKE_CONTAINER_ID, privileged=True) + + pytest.deprecated_call(call_start) + + def 
test_start_container_with_dict_instead_of_id(self): + self.client.start({'Id': fake_api.FAKE_CONTAINER_ID}) + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'containers/3cc2351ab11b/start' + ) + self.assertEqual(json.loads(args[1]['data']), {}) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + +class CreateContainerTest(DockerClientTest): + def test_create_container(self): + self.client.create_container('busybox', 'true') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", "Cmd": ["true"], + "AttachStdin": false, + "AttachStderr": true, "AttachStdout": true, + "StdinOnce": false, + "OpenStdin": false, "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_binds(self): + mount_dest = '/mnt' + + self.client.create_container('busybox', ['ls', mount_dest], + volumes=[mount_dest]) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls", "/mnt"], "AttachStdin": false, + "Volumes": {"/mnt": {}}, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_volume_string(self): + mount_dest = '/mnt' + + self.client.create_container('busybox', ['ls', mount_dest], + volumes=mount_dest) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, 
"Image": "busybox", + "Cmd": ["ls", "/mnt"], "AttachStdin": false, + "Volumes": {"/mnt": {}}, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_ports(self): + self.client.create_container('busybox', 'ls', + ports=[1111, (2222, 'udp'), (3333,)]) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "ExposedPorts": { + "1111/tcp": {}, + "2222/udp": {}, + "3333/tcp": {} + }, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_entrypoint(self): + self.client.create_container('busybox', 'hello', + entrypoint='cowsay entry') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["hello"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "Entrypoint": ["cowsay", "entry"]}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_cpu_shares(self): + self.client.create_container('busybox', 'ls', + cpu_shares=5) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + 
"StdinOnce": false, + "NetworkDisabled": false, + "CpuShares": 5}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_cpuset(self): + self.client.create_container('busybox', 'ls', + cpuset='0,1') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "Cpuset": "0,1", + "CpusetCpus": "0,1"}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_cgroup_parent(self): + self.client.create_container( + 'busybox', 'ls', host_config=self.client.create_host_config( + cgroup_parent='test' + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + data = json.loads(args[1]['data']) + self.assertIn('HostConfig', data) + self.assertIn('CgroupParent', data['HostConfig']) + self.assertEqual(data['HostConfig']['CgroupParent'], 'test') + + def test_create_container_with_working_dir(self): + self.client.create_container('busybox', 'ls', + working_dir='/root') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "WorkingDir": "/root"}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_stdin_open(self): + self.client.create_container('busybox', 'true', stdin_open=True) + + args = fake_request.call_args + self.assertEqual(args[0][1], 
+ url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", "Cmd": ["true"], + "AttachStdin": true, + "AttachStderr": true, "AttachStdout": true, + "StdinOnce": true, + "OpenStdin": true, "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_volumes_from(self): + vol_names = ['foo', 'bar'] + try: + self.client.create_container('busybox', 'true', + volumes_from=vol_names) + except docker.errors.DockerException: + self.assertTrue( + docker.utils.compare_version('1.10', self.client._version) >= 0 + ) + return + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'], + ','.join(vol_names)) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_empty_volumes_from(self): + self.client.create_container('busybox', 'true', volumes_from=[]) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertTrue('VolumesFrom' not in data) + + def test_create_named_container(self): + self.client.create_container('busybox', 'true', + name='marisa-kirisame') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", "Cmd": ["true"], + "AttachStdin": false, + "AttachStderr": true, "AttachStdout": true, + "StdinOnce": false, + "OpenStdin": false, "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'}) + + def test_create_container_with_mem_limit_as_int(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit=128.0 + ) + ) + + 
args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0) + + def test_create_container_with_mem_limit_as_string(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0) + + def test_create_container_with_mem_limit_as_string_with_k_unit(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128k' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024) + + def test_create_container_with_mem_limit_as_string_with_m_unit(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128m' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024) + + def test_create_container_with_mem_limit_as_string_with_g_unit(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128g' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual( + data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024 + ) + + def test_create_container_with_mem_limit_as_string_with_wrong_value(self): + self.assertRaises( + docker.errors.DockerException, + self.client.create_host_config, mem_limit='128p' + ) + + self.assertRaises( + docker.errors.DockerException, + self.client.create_host_config, mem_limit='1f28' + ) + + def test_create_container_with_lxc_conf(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + lxc_conf={'lxc.conf.k': 'lxc.conf.value'} + ) + ) + + args = fake_request.call_args + 
self.assertEqual( + args[0][1], + url_prefix + 'containers/create' + ) + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['LxcConf'] = [ + {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} + ] + + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], + {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_lxc_conf_compat(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['LxcConf'] = [ + {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} + ] + self.assertEqual( + json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_binds_ro(self): + mount_dest = '/mnt' + mount_origin = '/tmp' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "ro": True + }} + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + 
args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_binds_rw(self): + mount_dest = '/mnt' + mount_origin = '/tmp' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "ro": False + }} + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_binds_mode(self): + mount_dest = '/mnt' + mount_origin = '/tmp' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "mode": "z", + }} + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_binds_mode_and_ro_error(self): + with pytest.raises(ValueError): + mount_dest = '/mnt' + mount_origin = '/tmp' + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "mode": "z", + "ro": True, + }} + ) + ) + + def test_create_container_with_binds_list(self): + self.client.create_container( 
+ 'busybox', 'true', host_config=self.client.create_host_config( + binds=[ + "/tmp:/mnt/1:ro", + "/tmp:/mnt/2", + ], + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = [ + "/tmp:/mnt/1:ro", + "/tmp:/mnt/2", + ] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_port_binds(self): + self.maxDiff = None + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + port_bindings={ + 1111: None, + 2222: 2222, + '3333/udp': (3333,), + 4444: ('127.0.0.1',), + 5555: ('127.0.0.1', 5555), + 6666: [('127.0.0.1',), ('192.168.0.1',)] + } + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + data = json.loads(args[1]['data']) + port_bindings = data['HostConfig']['PortBindings'] + self.assertTrue('1111/tcp' in port_bindings) + self.assertTrue('2222/tcp' in port_bindings) + self.assertTrue('3333/udp' in port_bindings) + self.assertTrue('4444/tcp' in port_bindings) + self.assertTrue('5555/tcp' in port_bindings) + self.assertTrue('6666/tcp' in port_bindings) + self.assertEqual( + [{"HostPort": "", "HostIp": ""}], + port_bindings['1111/tcp'] + ) + self.assertEqual( + [{"HostPort": "2222", "HostIp": ""}], + port_bindings['2222/tcp'] + ) + self.assertEqual( + [{"HostPort": "3333", "HostIp": ""}], + port_bindings['3333/udp'] + ) + self.assertEqual( + [{"HostPort": "", "HostIp": "127.0.0.1"}], + port_bindings['4444/tcp'] + ) + self.assertEqual( + [{"HostPort": "5555", "HostIp": "127.0.0.1"}], + port_bindings['5555/tcp'] + ) + self.assertEqual(len(port_bindings['6666/tcp']), 2) + 
self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_mac_address(self): + mac_address_expected = "02:42:ac:11:00:0a" + + container = self.client.create_container( + 'busybox', ['sleep', '60'], mac_address=mac_address_expected) + + res = self.client.inspect_container(container['Id']) + self.assertEqual(mac_address_expected, + res['NetworkSettings']['MacAddress']) + + def test_create_container_with_links(self): + link_path = 'path' + alias = 'alias' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + links={link_path: alias} + ) + ) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'containers/create' + ) + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Links'] = ['path:alias'] + + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + + def test_create_container_with_multiple_links(self): + link_path = 'path' + alias = 'alias' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + links={ + link_path + '1': alias + '1', + link_path + '2': alias + '2' + } + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Links'] = [ + 'path1:alias1', 'path2:alias2' + ] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + + def test_create_container_with_links_as_list_of_tuples(self): + link_path = 'path' + alias = 'alias' + + 
self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + links=[(link_path, alias)] + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Links'] = ['path:alias'] + + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + + def test_create_container_privileged(self): + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config(privileged=True) + ) + + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Privileged'] = True + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_restart_policy(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + restart_policy={ + "Name": "always", + "MaximumRetryCount": 0 + } + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['RestartPolicy'] = { + "MaximumRetryCount": 0, "Name": "always" + } + self.assertEqual(json.loads(args[1]['data']), expected_payload) + + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def 
test_create_container_with_added_capabilities(self): + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config(cap_add=['MKNOD']) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['CapAdd'] = ['MKNOD'] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_dropped_capabilities(self): + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config(cap_drop=['MKNOD']) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['CapDrop'] = ['MKNOD'] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_devices(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + devices=['/dev/sda:/dev/xvda:rwm', + '/dev/sdb:/dev/xvdb', + '/dev/sdc'] + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Devices'] = [ + {'CgroupPermissions': 'rwm', + 'PathInContainer': '/dev/xvda', + 'PathOnHost': '/dev/sda'}, + {'CgroupPermissions': 'rwm', + 'PathInContainer': '/dev/xvdb', + 
'PathOnHost': '/dev/sdb'}, + {'CgroupPermissions': 'rwm', + 'PathInContainer': '/dev/sdc', + 'PathOnHost': '/dev/sdc'} + ] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_labels_dict(self): + labels_dict = { + six.text_type('foo'): six.text_type('1'), + six.text_type('bar'): six.text_type('2'), + } + + self.client.create_container( + 'busybox', 'true', + labels=labels_dict, + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_labels_list(self): + labels_list = [ + six.text_type('foo'), + six.text_type('bar'), + ] + labels_dict = { + six.text_type('foo'): six.text_type(), + six.text_type('bar'): six.text_type(), + } + + self.client.create_container( + 'busybox', 'true', + labels=labels_list, + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_named_volume(self): + mount_dest = '/mnt' + volume_name = 'name' + + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config( + binds={volume_name: { + "bind": mount_dest, + "ro": False + }}), + volume_driver='foodriver', + ) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'containers/create' + ) + expected_payload = self.base_create_payload() + 
expected_payload['VolumeDriver'] = 'foodriver' + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_stop_signal(self): + self.client.create_container('busybox', 'ls', + stop_signal='SIGINT') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "StopSignal": "SIGINT"}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + @requires_api_version('1.22') + def test_create_container_with_aliases(self): + self.client.create_container( + 'busybox', 'ls', + host_config=self.client.create_host_config( + network_mode='some-network', + ), + networking_config=self.client.create_networking_config({ + 'some-network': self.client.create_endpoint_config( + aliases=['foo', 'bar'], + ), + }), + ) + + args = fake_request.call_args + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "HostConfig": { + "NetworkMode": "some-network" + }, + "NetworkingConfig": { + "EndpointsConfig": { + "some-network": {"Aliases": ["foo", "bar"]} + } + }}''')) + + +class ContainerTest(DockerClientTest): + def test_list_containers(self): + self.client.containers(all=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 
'containers/json', + params={ + 'all': 1, + 'since': None, + 'size': 0, + 'limit': -1, + 'trunc_cmd': 0, + 'before': None + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_resize_container(self): + self.client.resize( + {'Id': fake_api.FAKE_CONTAINER_ID}, + height=15, + width=120 + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/resize', + params={'h': 15, 'w': 120}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_rename_container(self): + self.client.rename( + {'Id': fake_api.FAKE_CONTAINER_ID}, + name='foobar' + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/rename', + params={'name': 'foobar'}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_wait(self): + self.client.wait(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/wait', + timeout=None + ) + + def test_wait_with_dict_instead_of_id(self): + self.client.wait({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/wait', + timeout=None + ) + + def test_logs(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + logs = self.client.logs(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + self.assertEqual( + logs, + 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + ) + + def test_logs_with_dict_instead_of_id(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + logs = self.client.logs({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + 
timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + self.assertEqual( + logs, + 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + ) + + def test_log_streaming(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, + follow=False) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + + def test_log_following(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, + follow=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + def test_log_following_backwards(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + + def test_log_streaming_and_following(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, + follow=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + + def test_log_tail(self): + + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, 
stream=False, + follow=False, tail=10) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 10}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + def test_log_since(self): + ts = 809222400 + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, + follow=False, since=ts) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all', 'since': ts}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + def test_log_since_with_datetime(self): + ts = 809222400 + time = datetime.datetime.utcfromtimestamp(ts) + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, + follow=False, since=time) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all', 'since': ts}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + def test_log_tty(self): + m = mock.Mock() + with mock.patch('docker.Client.inspect_container', + fake_inspect_container_tty): + with mock.patch('docker.Client._stream_raw_result', + m): + self.client.logs(fake_api.FAKE_CONTAINER_ID, + follow=True, stream=True) + + self.assertTrue(m.called) + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + + def test_diff(self): + self.client.diff(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/changes', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def 
test_diff_with_dict_instead_of_id(self): + self.client.diff({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/changes', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_port(self): + self.client.port({'Id': fake_api.FAKE_CONTAINER_ID}, 1111) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/json', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_stop_container(self): + timeout = 2 + + self.client.stop(fake_api.FAKE_CONTAINER_ID, timeout=timeout) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/stop', + params={'t': timeout}, + timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) + ) + + def test_stop_container_with_dict_instead_of_id(self): + timeout = 2 + + self.client.stop({'Id': fake_api.FAKE_CONTAINER_ID}, + timeout=timeout) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/stop', + params={'t': timeout}, + timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) + ) + + def test_pause_container(self): + self.client.pause(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/pause', + timeout=(DEFAULT_TIMEOUT_SECONDS) + ) + + def test_unpause_container(self): + self.client.unpause(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/unpause', + timeout=(DEFAULT_TIMEOUT_SECONDS) + ) + + def test_kill_container(self): + self.client.kill(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/kill', + params={}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_kill_container_with_dict_instead_of_id(self): + self.client.kill({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/kill', + params={}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def 
test_kill_container_with_signal(self): + self.client.kill(fake_api.FAKE_CONTAINER_ID, signal=signal.SIGTERM) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/kill', + params={'signal': signal.SIGTERM}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_restart_container(self): + self.client.restart(fake_api.FAKE_CONTAINER_ID, timeout=2) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/restart', + params={'t': 2}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_restart_container_with_dict_instead_of_id(self): + self.client.restart({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=2) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/restart', + params={'t': 2}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_remove_container(self): + self.client.remove_container(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'containers/3cc2351ab11b', + params={'v': False, 'link': False, 'force': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_remove_container_with_dict_instead_of_id(self): + self.client.remove_container({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'containers/3cc2351ab11b', + params={'v': False, 'link': False, 'force': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_export(self): + self.client.export(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/export', + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_export_with_dict_instead_of_id(self): + self.client.export({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/export', + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_container(self): + self.client.inspect_container(fake_api.FAKE_CONTAINER_ID) + + 
fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/json', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_container_undefined_id(self): + for arg in None, '', {True: True}: + with pytest.raises(docker.errors.NullResource) as excinfo: + self.client.inspect_container(arg) + + self.assertEqual( + excinfo.value.args[0], 'image or container param is undefined' + ) + + def test_container_stats(self): + self.client.stats(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/stats', + timeout=60, + stream=True + ) + + def test_container_top(self): + self.client.top(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/top', + params={}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_container_top_with_psargs(self): + self.client.top(fake_api.FAKE_CONTAINER_ID, 'waux') + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/top', + params={'ps_args': 'waux'}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + @requires_api_version('1.22') + def test_container_update(self): + self.client.update_container( + fake_api.FAKE_CONTAINER_ID, mem_limit='2k', cpu_shares=124, + blkio_weight=345 + ) + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'containers/3cc2351ab11b/update' + ) + self.assertEqual( + json.loads(args[1]['data']), + {'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345} + ) + self.assertEqual( + args[1]['headers']['Content-Type'], 'application/json' + ) diff --git a/testbed/docker__docker-py/tests/unit/exec_test.py b/testbed/docker__docker-py/tests/unit/exec_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3007799cb58d50ece01e97d914d04f7be0f093db --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/exec_test.py @@ -0,0 +1,75 @@ +import json + +from . 
import fake_api +from .api_test import ( + DockerClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS, +) + + +class ExecTest(DockerClientTest): + def test_exec_create(self): + self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1']) + + args = fake_request.call_args + self.assertEqual( + 'POST', + args[0][0], url_prefix + 'containers/{0}/exec'.format( + fake_api.FAKE_CONTAINER_ID + ) + ) + + self.assertEqual( + json.loads(args[1]['data']), { + 'Tty': False, + 'AttachStdout': True, + 'Container': fake_api.FAKE_CONTAINER_ID, + 'Cmd': ['ls', '-1'], + 'Privileged': False, + 'AttachStdin': False, + 'AttachStderr': True, + 'User': '' + } + ) + + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_exec_start(self): + self.client.exec_start(fake_api.FAKE_EXEC_ID) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'exec/{0}/start'.format( + fake_api.FAKE_EXEC_ID + ) + ) + + self.assertEqual( + json.loads(args[1]['data']), { + 'Tty': False, + 'Detach': False, + } + ) + + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_exec_inspect(self): + self.client.exec_inspect(fake_api.FAKE_EXEC_ID) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'exec/{0}/json'.format( + fake_api.FAKE_EXEC_ID + ) + ) + + def test_exec_resize(self): + self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID), + params={'h': 20, 'w': 60}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) diff --git a/testbed/docker__docker-py/tests/unit/fake_api.py b/testbed/docker__docker-py/tests/unit/fake_api.py new file mode 100644 index 0000000000000000000000000000000000000000..99525956d0b38edbab5ce81e383ea1a86acfa98e --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/fake_api.py @@ -0,0 +1,549 @@ +# Copyright 2013 dotCloud inc. 
+ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import fake_stat +from docker import constants + +CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION) + +FAKE_CONTAINER_ID = '3cc2351ab11b' +FAKE_IMAGE_ID = 'e9aa60c60128' +FAKE_EXEC_ID = 'd5d177f121dc' +FAKE_IMAGE_NAME = 'test_image' +FAKE_TARBALL_PATH = '/path/to/tarball' +FAKE_REPO_NAME = 'repo' +FAKE_TAG_NAME = 'tag' +FAKE_FILE_NAME = 'file' +FAKE_URL = 'myurl' +FAKE_PATH = '/path' +FAKE_VOLUME_NAME = 'perfectcherryblossom' + +# Each method is prefixed with HTTP method (get, post...) 
# for clarity and readability


def get_fake_raw_version():
    # Canned payload for the *unversioned* GET /version endpoint.
    # Every helper in this module returns (status_code, response_body),
    # mirroring what the fake transport layer hands back to the client.
    status_code = 200
    response = {
        "ApiVersion": "1.18",
        "GitCommit": "fake-commit",
        "GoVersion": "go1.3.3",
        "Version": "1.5.0"
    }
    return status_code, response


def get_fake_version():
    # Canned payload for the versioned GET /<api>/version endpoint.
    status_code = 200
    response = {'GoVersion': '1', 'Version': '1.1.1',
                'GitCommit': 'deadbeef+CHANGES'}
    return status_code, response


def get_fake_info():
    # Canned payload for GET /info (daemon-wide counters and flags).
    status_code = 200
    response = {'Containers': 1, 'Images': 1, 'Debug': False,
                'MemoryLimit': False, 'SwapLimit': False,
                'IPv4Forwarding': True}
    return status_code, response


def get_fake_search():
    # Canned payload for GET /images/search.
    status_code = 200
    response = [{'Name': 'busybox', 'Description': 'Fake Description'}]
    return status_code, response


def get_fake_images():
    # Canned payload for GET /images/json (image listing).
    status_code = 200
    response = [{
        'Id': FAKE_IMAGE_ID,
        'Created': '2 days ago',
        'Repository': 'busybox',
        'RepoTags': ['busybox:latest', 'busybox:1.0'],
    }]
    return status_code, response


def get_fake_image_history():
    # Canned payload for GET /images/<name>/history: two layers, the
    # second with an empty CreatedBy (base layer).
    status_code = 200
    response = [
        {
            "Id": "b750fe79269d",
            "Created": 1364102658,
            "CreatedBy": "/bin/bash"
        },
        {
            "Id": "27cf78414709",
            "Created": 1364068391,
            "CreatedBy": ""
        }
    ]

    return status_code, response


def post_fake_import_image():
    # Canned payload for POST /images/create (image import); the body is
    # a plain progress string rather than JSON.
    status_code = 200
    response = 'Import messages...'
+ + return status_code, response + + +def get_fake_containers(): + status_code = 200 + response = [{ + 'Id': FAKE_CONTAINER_ID, + 'Image': 'busybox:latest', + 'Created': '2 days ago', + 'Command': 'true', + 'Status': 'fake status' + }] + return status_code, response + + +def post_fake_start_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_resize_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_create_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def get_fake_inspect_container(tty=False): + status_code = 200 + response = { + 'Id': FAKE_CONTAINER_ID, + 'Config': {'Privileged': True, 'Tty': tty}, + 'ID': FAKE_CONTAINER_ID, + 'Image': 'busybox:latest', + "State": { + "Running": True, + "Pid": 0, + "ExitCode": 0, + "StartedAt": "2013-09-25T14:01:18.869545111+02:00", + "Ghost": False + }, + "MacAddress": "02:42:ac:11:00:0a" + } + return status_code, response + + +def get_fake_inspect_image(): + status_code = 200 + response = { + 'id': FAKE_IMAGE_ID, + 'parent': "27cf784147099545", + 'created': "2013-03-23T22:24:18.818426-07:00", + 'container': FAKE_CONTAINER_ID, + 'container_config': + { + "Hostname": "", + "User": "", + "Memory": 0, + "MemorySwap": 0, + "AttachStdin": False, + "AttachStdout": False, + "AttachStderr": False, + "PortSpecs": "", + "Tty": True, + "OpenStdin": True, + "StdinOnce": False, + "Env": "", + "Cmd": ["/bin/bash"], + "Dns": "", + "Image": "base", + "Volumes": "", + "VolumesFrom": "", + "WorkingDir": "" + }, + 'Size': 6823592 + } + return status_code, response + + +def get_fake_port(): + status_code = 200 + response = { + 'HostConfig': { + 'Binds': None, + 'ContainerIDFile': '', + 'Links': None, + 'LxcConf': None, + 'PortBindings': { + '1111': None, + '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}], + '2222': None + }, + 'Privileged': 
False, + 'PublishAllPorts': False + }, + 'NetworkSettings': { + 'Bridge': 'docker0', + 'PortMapping': None, + 'Ports': { + '1111': None, + '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}], + '2222': None}, + 'MacAddress': '02:42:ac:11:00:0a' + } + } + return status_code, response + + +def get_fake_insert_image(): + status_code = 200 + response = {'StatusCode': 0} + return status_code, response + + +def get_fake_wait(): + status_code = 200 + response = {'StatusCode': 0} + return status_code, response + + +def get_fake_logs(): + status_code = 200 + response = (b'\x01\x00\x00\x00\x00\x00\x00\x11Flowering Nights\n' + b'\x01\x00\x00\x00\x00\x00\x00\x10(Sakuya Iyazoi)\n') + return status_code, response + + +def get_fake_diff(): + status_code = 200 + response = [{'Path': '/test', 'Kind': 1}] + return status_code, response + + +def get_fake_events(): + status_code = 200 + response = [{'status': 'stop', 'id': FAKE_CONTAINER_ID, + 'from': FAKE_IMAGE_ID, 'time': 1423247867}] + return status_code, response + + +def get_fake_export(): + status_code = 200 + response = 'Byte Stream....' 
+ return status_code, response + + +def post_fake_exec_create(): + status_code = 200 + response = {'Id': FAKE_EXEC_ID} + return status_code, response + + +def post_fake_exec_start(): + status_code = 200 + response = (b'\x01\x00\x00\x00\x00\x00\x00\x11bin\nboot\ndev\netc\n' + b'\x01\x00\x00\x00\x00\x00\x00\x12lib\nmnt\nproc\nroot\n' + b'\x01\x00\x00\x00\x00\x00\x00\x0csbin\nusr\nvar\n') + return status_code, response + + +def post_fake_exec_resize(): + status_code = 201 + return status_code, '' + + +def get_fake_exec_inspect(): + return 200, { + 'OpenStderr': True, + 'OpenStdout': True, + 'Container': get_fake_inspect_container()[1], + 'Running': False, + 'ProcessConfig': { + 'arguments': ['hello world'], + 'tty': False, + 'entrypoint': 'echo', + 'privileged': False, + 'user': '' + }, + 'ExitCode': 0, + 'ID': FAKE_EXEC_ID, + 'OpenStdin': False + } + + +def post_fake_stop_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_kill_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_pause_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_unpause_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_restart_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_rename_container(): + status_code = 204 + return status_code, None + + +def delete_fake_remove_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_image_create(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def delete_fake_remove_image(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def get_fake_get_image(): + 
status_code = 200 + response = 'Byte Stream....' + return status_code, response + + +def post_fake_load_image(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def post_fake_commit(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_push(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def post_fake_build_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_tag_image(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def get_fake_stats(): + status_code = 200 + response = fake_stat.OBJ + return status_code, response + + +def get_fake_top(): + return 200, { + 'Processes': [ + [ + 'root', + '26501', + '6907', + '0', + '10:32', + 'pts/55', + '00:00:00', + 'sleep 60', + ], + ], + 'Titles': [ + 'UID', + 'PID', + 'PPID', + 'C', + 'STIME', + 'TTY', + 'TIME', + 'CMD', + ], + } + + +def get_fake_volume_list(): + status_code = 200 + response = { + 'Volumes': [ + { + 'Name': 'perfectcherryblossom', + 'Driver': 'local', + 'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom' + }, { + 'Name': 'subterraneananimism', + 'Driver': 'local', + 'Mountpoint': '/var/lib/docker/volumes/subterraneananimism' + } + ] + } + return status_code, response + + +def get_fake_volume(): + status_code = 200 + response = { + 'Name': 'perfectcherryblossom', + 'Driver': 'local', + 'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom' + } + return status_code, response + + +def fake_remove_volume(): + return 204, None + + +def post_fake_update_container(): + return 200, {'Warnings': []} + + +# Maps real api url to fake response callback +prefix = 'http+docker://localunixsocket' +fake_responses = { + '{0}/version'.format(prefix): + get_fake_raw_version, + '{1}/{0}/version'.format(CURRENT_VERSION, prefix): + get_fake_version, + 
'{1}/{0}/info'.format(CURRENT_VERSION, prefix): + get_fake_info, + '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix): + get_fake_search, + '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix): + get_fake_images, + '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix): + get_fake_image_history, + '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + post_fake_import_image, + '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix): + get_fake_containers, + '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix): + post_fake_start_container, + '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix): + post_fake_resize_container, + '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): + get_fake_inspect_container, + '{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix): + post_fake_rename_container, + '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix): + post_fake_tag_image, + '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix): + get_fake_wait, + '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix): + get_fake_logs, + '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix): + get_fake_diff, + '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix): + get_fake_export, + '{1}/{0}/containers/3cc2351ab11b/update'.format(CURRENT_VERSION, prefix): + post_fake_update_container, + '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix): + post_fake_exec_create, + '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix): + post_fake_exec_start, + '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix): + get_fake_exec_inspect, + '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix): + post_fake_exec_resize, + + '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix): + get_fake_stats, + 
'{1}/{0}/containers/3cc2351ab11b/top'.format(CURRENT_VERSION, prefix): + get_fake_top, + '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix): + post_fake_stop_container, + '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix): + post_fake_kill_container, + '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix): + post_fake_pause_container, + '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix): + post_fake_unpause_container, + '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): + get_fake_port, + '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix): + post_fake_restart_container, + '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix): + delete_fake_remove_container, + '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + post_fake_image_create, + '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix): + delete_fake_remove_image, + '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix): + get_fake_get_image, + '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix): + post_fake_load_image, + '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix): + get_fake_inspect_image, + '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix): + get_fake_insert_image, + '{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix): + post_fake_push, + '{1}/{0}/commit'.format(CURRENT_VERSION, prefix): + post_fake_commit, + '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix): + post_fake_create_container, + '{1}/{0}/build'.format(CURRENT_VERSION, prefix): + post_fake_build_container, + '{1}/{0}/events'.format(CURRENT_VERSION, prefix): + get_fake_events, + ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'): + get_fake_volume_list, + ('{1}/{0}/volumes/create'.format(CURRENT_VERSION, prefix), 'POST'): + get_fake_volume, + ('{1}/{0}/volumes/{2}'.format( + CURRENT_VERSION, prefix, FAKE_VOLUME_NAME + 
), 'GET'): + get_fake_volume, + ('{1}/{0}/volumes/{2}'.format( + CURRENT_VERSION, prefix, FAKE_VOLUME_NAME + ), 'DELETE'): + fake_remove_volume, +} diff --git a/testbed/docker__docker-py/tests/unit/fake_stat.py b/testbed/docker__docker-py/tests/unit/fake_stat.py new file mode 100644 index 0000000000000000000000000000000000000000..a7f10293afe626f440010cbdf3604adb4d78f791 --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/fake_stat.py @@ -0,0 +1,133 @@ +OBJ = { + "read": "2015-02-11T19:20:46.667237763+02:00", + "network": { + "rx_bytes": 567224, + "rx_packets": 3773, + "rx_errors": 0, + "rx_dropped": 0, + "tx_bytes": 1176, + "tx_packets": 13, + "tx_errors": 0, + "tx_dropped": 0 + }, + "cpu_stats": { + "cpu_usage": { + "total_usage": 157260874053, + "percpu_usage": [ + 52196306950, + 24118413549, + 53292684398, + 27653469156 + ], + "usage_in_kernelmode": 37140000000, + "usage_in_usermode": 62140000000 + }, + "system_cpu_usage": 3.0881377e+14, + "throttling_data": { + "periods": 0, + "throttled_periods": 0, + "throttled_time": 0 + } + }, + "memory_stats": { + "usage": 179314688, + "max_usage": 258166784, + "stats": { + "active_anon": 90804224, + "active_file": 2195456, + "cache": 3096576, + "hierarchical_memory_limit": 1.844674407371e+19, + "inactive_anon": 85516288, + "inactive_file": 798720, + "mapped_file": 2646016, + "pgfault": 101034, + "pgmajfault": 1207, + "pgpgin": 115814, + "pgpgout": 75613, + "rss": 176218112, + "rss_huge": 12582912, + "total_active_anon": 90804224, + "total_active_file": 2195456, + "total_cache": 3096576, + "total_inactive_anon": 85516288, + "total_inactive_file": 798720, + "total_mapped_file": 2646016, + "total_pgfault": 101034, + "total_pgmajfault": 1207, + "total_pgpgin": 115814, + "total_pgpgout": 75613, + "total_rss": 176218112, + "total_rss_huge": 12582912, + "total_unevictable": 0, + "total_writeback": 0, + "unevictable": 0, + "writeback": 0 + }, + "failcnt": 0, + "limit": 8039038976 + }, + "blkio_stats": { + 
"io_service_bytes_recursive": [ + { + "major": 8, + "minor": 0, + "op": "Read", + "value": 72843264 + }, { + "major": 8, + "minor": 0, + "op": "Write", + "value": 4096 + }, { + "major": 8, + "minor": 0, + "op": "Sync", + "value": 4096 + }, { + "major": 8, + "minor": 0, + "op": "Async", + "value": 72843264 + }, { + "major": 8, + "minor": 0, + "op": "Total", + "value": 72847360 + } + ], + "io_serviced_recursive": [ + { + "major": 8, + "minor": 0, + "op": "Read", + "value": 10581 + }, { + "major": 8, + "minor": 0, + "op": "Write", + "value": 1 + }, { + "major": 8, + "minor": 0, + "op": "Sync", + "value": 1 + }, { + "major": 8, + "minor": 0, + "op": "Async", + "value": 10581 + }, { + "major": 8, + "minor": 0, + "op": "Total", + "value": 10582 + } + ], + "io_queue_recursive": [], + "io_service_time_recursive": [], + "io_wait_time_recursive": [], + "io_merged_recursive": [], + "io_time_recursive": [], + "sectors_recursive": [] + } +} diff --git a/testbed/docker__docker-py/tests/unit/image_test.py b/testbed/docker__docker-py/tests/unit/image_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a46e48eb1c4d5f47a8d6861407a61fe5463e9604 --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/image_test.py @@ -0,0 +1,346 @@ +import docker +import pytest + +from . 
import fake_api +from .api_test import ( + DockerClientTest, fake_request, DEFAULT_TIMEOUT_SECONDS, url_prefix, + fake_resolve_authconfig +) + +try: + from unittest import mock +except ImportError: + import mock + + +class ImageTest(DockerClientTest): + def test_image_viz(self): + with pytest.raises(Exception): + self.client.images('busybox', viz=True) + self.fail('Viz output should not be supported!') + + def test_images(self): + self.client.images(all=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 0, 'all': 1}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_images_quiet(self): + self.client.images(all=True, quiet=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 1, 'all': 1}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_image_ids(self): + self.client.images(quiet=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 1, 'all': 0}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_images_filters(self): + self.client.images(filters={'dangling': True}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 0, 'all': 0, + 'filters': '{"dangling": ["true"]}'}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_pull(self): + self.client.pull('joffrey/test001') + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'images/create' + ) + self.assertEqual( + args[1]['params'], + {'tag': None, 'fromImage': 'joffrey/test001'} + ) + self.assertFalse(args[1]['stream']) + + def test_pull_stream(self): + self.client.pull('joffrey/test001', stream=True) + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'images/create' + ) + self.assertEqual( + args[1]['params'], + {'tag': None, 'fromImage': 'joffrey/test001'} + ) + 
self.assertTrue(args[1]['stream']) + + def test_commit(self): + self.client.commit(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'commit', + data='{}', + headers={'Content-Type': 'application/json'}, + params={ + 'repo': None, + 'comment': None, + 'tag': None, + 'container': '3cc2351ab11b', + 'author': None + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_remove_image(self): + self.client.remove_image(fake_api.FAKE_IMAGE_ID) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'images/e9aa60c60128', + params={'force': False, 'noprune': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_image_history(self): + self.client.history(fake_api.FAKE_IMAGE_NAME) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/test_image/history', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_import_image(self): + self.client.import_image( + fake_api.FAKE_TARBALL_PATH, + repository=fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/create', + params={ + 'repo': fake_api.FAKE_REPO_NAME, + 'tag': fake_api.FAKE_TAG_NAME, + 'fromSrc': fake_api.FAKE_TARBALL_PATH + }, + data=None, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_import_image_from_bytes(self): + stream = (i for i in range(0, 100)) + + self.client.import_image( + stream, + repository=fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/create', + params={ + 'repo': fake_api.FAKE_REPO_NAME, + 'tag': fake_api.FAKE_TAG_NAME, + 'fromSrc': '-', + }, + headers={ + 'Content-Type': 'application/tar', + }, + data=stream, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_import_image_from_image(self): + self.client.import_image( + image=fake_api.FAKE_IMAGE_NAME, + repository=fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 
'images/create', + params={ + 'repo': fake_api.FAKE_REPO_NAME, + 'tag': fake_api.FAKE_TAG_NAME, + 'fromImage': fake_api.FAKE_IMAGE_NAME + }, + data=None, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_image(self): + self.client.inspect_image(fake_api.FAKE_IMAGE_NAME) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/test_image/json', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_image_undefined_id(self): + for arg in None, '', {True: True}: + with pytest.raises(docker.errors.NullResource) as excinfo: + self.client.inspect_image(arg) + + self.assertEqual( + excinfo.value.args[0], 'image or container param is undefined' + ) + + def test_insert_image(self): + try: + self.client.insert(fake_api.FAKE_IMAGE_NAME, + fake_api.FAKE_URL, fake_api.FAKE_PATH) + except docker.errors.DeprecatedMethod: + self.assertTrue( + docker.utils.compare_version('1.12', self.client._version) >= 0 + ) + return + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/insert', + params={ + 'url': fake_api.FAKE_URL, + 'path': fake_api.FAKE_PATH + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_push_image(self): + with mock.patch('docker.auth.auth.resolve_authconfig', + fake_resolve_authconfig): + self.client.push(fake_api.FAKE_IMAGE_NAME) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/push', + params={ + 'tag': None + }, + data='{}', + headers={'Content-Type': 'application/json'}, + stream=False, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_push_image_with_tag(self): + with mock.patch('docker.auth.auth.resolve_authconfig', + fake_resolve_authconfig): + self.client.push( + fake_api.FAKE_IMAGE_NAME, tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/push', + params={ + 'tag': fake_api.FAKE_TAG_NAME, + }, + data='{}', + headers={'Content-Type': 'application/json'}, + stream=False, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def 
test_push_image_stream(self): + with mock.patch('docker.auth.auth.resolve_authconfig', + fake_resolve_authconfig): + self.client.push(fake_api.FAKE_IMAGE_NAME, stream=True) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/push', + params={ + 'tag': None + }, + data='{}', + headers={'Content-Type': 'application/json'}, + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_tag_image(self): + self.client.tag(fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/e9aa60c60128/tag', + params={ + 'tag': None, + 'repo': 'repo', + 'force': 0 + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_tag_image_tag(self): + self.client.tag( + fake_api.FAKE_IMAGE_ID, + fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/e9aa60c60128/tag', + params={ + 'tag': 'tag', + 'repo': 'repo', + 'force': 0 + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_tag_image_force(self): + self.client.tag( + fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME, force=True) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/e9aa60c60128/tag', + params={ + 'tag': None, + 'repo': 'repo', + 'force': 1 + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_get_image(self): + self.client.get_image(fake_api.FAKE_IMAGE_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/e9aa60c60128/get', + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_load_image(self): + self.client.load_image('Byte Stream....') + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/load', + data='Byte Stream....', + timeout=DEFAULT_TIMEOUT_SECONDS + ) diff --git a/testbed/docker__docker-py/tests/unit/network_test.py b/testbed/docker__docker-py/tests/unit/network_test.py new file mode 100644 index 0000000000000000000000000000000000000000..5bba9db2307a8af59f3fdb103281326daad0d708 --- 
/dev/null +++ b/testbed/docker__docker-py/tests/unit/network_test.py @@ -0,0 +1,187 @@ +import json + +import six + +from .. import base +from .api_test import DockerClientTest, url_prefix, response +from docker.utils import create_ipam_config, create_ipam_pool + +try: + from unittest import mock +except ImportError: + import mock + + +class NetworkTest(DockerClientTest): + @base.requires_api_version('1.21') + def test_list_networks(self): + networks = [ + { + "name": "none", + "id": "8e4e55c6863ef424", + "type": "null", + "endpoints": [] + }, + { + "name": "host", + "id": "062b6d9ea7913fde", + "type": "host", + "endpoints": [] + }, + ] + + get = mock.Mock(return_value=response( + status_code=200, content=json.dumps(networks).encode('utf-8'))) + + with mock.patch('docker.Client.get', get): + self.assertEqual(self.client.networks(), networks) + + self.assertEqual(get.call_args[0][0], url_prefix + 'networks') + + filters = json.loads(get.call_args[1]['params']['filters']) + self.assertFalse(filters) + + self.client.networks(names=['foo']) + filters = json.loads(get.call_args[1]['params']['filters']) + self.assertEqual(filters, {'name': ['foo']}) + + self.client.networks(ids=['123']) + filters = json.loads(get.call_args[1]['params']['filters']) + self.assertEqual(filters, {'id': ['123']}) + + @base.requires_api_version('1.21') + def test_create_network(self): + network_data = { + "id": 'abc12345', + "warning": "", + } + + network_response = response(status_code=200, content=network_data) + post = mock.Mock(return_value=network_response) + + with mock.patch('docker.Client.post', post): + result = self.client.create_network('foo') + self.assertEqual(result, network_data) + + self.assertEqual( + post.call_args[0][0], + url_prefix + 'networks/create') + + self.assertEqual( + json.loads(post.call_args[1]['data']), + {"Name": "foo"}) + + opts = { + 'com.docker.network.bridge.enable_icc': False, + 'com.docker.network.bridge.enable_ip_masquerade': False, + } + 
self.client.create_network('foo', 'bridge', opts) + + self.assertEqual( + json.loads(post.call_args[1]['data']), + {"Name": "foo", "Driver": "bridge", "Options": opts}) + + ipam_pool_config = create_ipam_pool(subnet="192.168.52.0/24", + gateway="192.168.52.254") + ipam_config = create_ipam_config(pool_configs=[ipam_pool_config]) + + self.client.create_network("bar", driver="bridge", + ipam=ipam_config) + + self.assertEqual( + json.loads(post.call_args[1]['data']), + { + "Name": "bar", + "Driver": "bridge", + "IPAM": { + "Driver": "default", + "Config": [{ + "IPRange": None, + "Gateway": "192.168.52.254", + "Subnet": "192.168.52.0/24", + "AuxiliaryAddresses": None, + }] + } + }) + + @base.requires_api_version('1.21') + def test_remove_network(self): + network_id = 'abc12345' + delete = mock.Mock(return_value=response(status_code=200)) + + with mock.patch('docker.Client.delete', delete): + self.client.remove_network(network_id) + + args = delete.call_args + self.assertEqual(args[0][0], + url_prefix + 'networks/{0}'.format(network_id)) + + @base.requires_api_version('1.21') + def test_inspect_network(self): + network_id = 'abc12345' + network_name = 'foo' + network_data = { + six.u('name'): network_name, + six.u('id'): network_id, + six.u('driver'): 'bridge', + six.u('containers'): {}, + } + + network_response = response(status_code=200, content=network_data) + get = mock.Mock(return_value=network_response) + + with mock.patch('docker.Client.get', get): + result = self.client.inspect_network(network_id) + self.assertEqual(result, network_data) + + args = get.call_args + self.assertEqual(args[0][0], + url_prefix + 'networks/{0}'.format(network_id)) + + @base.requires_api_version('1.21') + def test_connect_container_to_network(self): + network_id = 'abc12345' + container_id = 'def45678' + + post = mock.Mock(return_value=response(status_code=201)) + + with mock.patch('docker.Client.post', post): + self.client.connect_container_to_network( + {'Id': container_id}, + 
network_id, + aliases=['foo', 'bar'], + links=[('baz', 'quux')] + ) + + self.assertEqual( + post.call_args[0][0], + url_prefix + 'networks/{0}/connect'.format(network_id)) + + self.assertEqual( + json.loads(post.call_args[1]['data']), + { + 'Container': container_id, + 'EndpointConfig': { + 'Aliases': ['foo', 'bar'], + 'Links': ['baz:quux'], + }, + }) + + @base.requires_api_version('1.21') + def test_disconnect_container_from_network(self): + network_id = 'abc12345' + container_id = 'def45678' + + post = mock.Mock(return_value=response(status_code=201)) + + with mock.patch('docker.Client.post', post): + self.client.disconnect_container_from_network( + {'Id': container_id}, network_id) + + self.assertEqual( + post.call_args[0][0], + url_prefix + 'networks/{0}/disconnect'.format(network_id)) + + self.assertEqual( + json.loads(post.call_args[1]['data']), + {'container': container_id}) diff --git a/testbed/docker__docker-py/tests/unit/testdata/certs/ca.pem b/testbed/docker__docker-py/tests/unit/testdata/certs/ca.pem new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/docker__docker-py/tests/unit/testdata/certs/cert.pem b/testbed/docker__docker-py/tests/unit/testdata/certs/cert.pem new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/docker__docker-py/tests/unit/testdata/certs/key.pem b/testbed/docker__docker-py/tests/unit/testdata/certs/key.pem new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/docker__docker-py/tests/unit/utils_test.py b/testbed/docker__docker-py/tests/unit/utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..65b7cf8ab4bf582de6fb5701319d835aa65f1631 --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/utils_test.py @@ -0,0 +1,944 @@ +# -*- coding: utf-8 -*- + +import base64 +import 
json +import os +import os.path +import shutil +import sys +import tarfile +import tempfile + +import pytest +import six + +try: + from ssl import OP_NO_SSLv3, OP_NO_SSLv2, OP_NO_TLSv1 +except ImportError: + OP_NO_SSLv2 = 0x1000000 + OP_NO_SSLv3 = 0x2000000 + OP_NO_TLSv1 = 0x4000000 + +from docker.client import Client +from docker.constants import DEFAULT_DOCKER_API_VERSION +from docker.errors import DockerException, InvalidVersion +from docker.ssladapter import ssladapter +from docker.utils import ( + parse_repository_tag, parse_host, convert_filters, kwargs_from_env, + create_host_config, Ulimit, LogConfig, parse_bytes, parse_env_file, + exclude_paths, convert_volume_binds, decode_json_header, tar, + split_command, create_ipam_config, create_ipam_pool, parse_devices, +) +from docker.utils.utils import create_endpoint_config +from docker.utils.ports import build_port_bindings, split_port + +from .. import base +from ..helpers import make_tree + + +TEST_CERT_DIR = os.path.join( + os.path.dirname(__file__), + 'testdata/certs', +) + + +class HostConfigTest(base.BaseTestCase): + def test_create_host_config_no_options(self): + config = create_host_config(version='1.19') + self.assertFalse('NetworkMode' in config) + + def test_create_host_config_no_options_newer_api_version(self): + config = create_host_config(version='1.20') + self.assertEqual(config['NetworkMode'], 'default') + + def test_create_host_config_invalid_cpu_cfs_types(self): + with pytest.raises(TypeError): + create_host_config(version='1.20', cpu_quota='0') + + with pytest.raises(TypeError): + create_host_config(version='1.20', cpu_period='0') + + with pytest.raises(TypeError): + create_host_config(version='1.20', cpu_quota=23.11) + + with pytest.raises(TypeError): + create_host_config(version='1.20', cpu_period=1999.0) + + def test_create_host_config_with_cpu_quota(self): + config = create_host_config(version='1.20', cpu_quota=1999) + self.assertEqual(config.get('CpuQuota'), 1999) + + def 
test_create_host_config_with_cpu_period(self): + config = create_host_config(version='1.20', cpu_period=1999) + self.assertEqual(config.get('CpuPeriod'), 1999) + + def test_create_host_config_with_shm_size(self): + config = create_host_config(version='1.22', shm_size=67108864) + self.assertEqual(config.get('ShmSize'), 67108864) + + def test_create_host_config_with_shm_size_in_mb(self): + config = create_host_config(version='1.22', shm_size='64M') + self.assertEqual(config.get('ShmSize'), 67108864) + + def test_create_host_config_with_oom_kill_disable(self): + config = create_host_config(version='1.20', oom_kill_disable=True) + self.assertEqual(config.get('OomKillDisable'), True) + self.assertRaises( + InvalidVersion, lambda: create_host_config(version='1.18.3', + oom_kill_disable=True)) + + def test_create_endpoint_config_with_aliases(self): + config = create_endpoint_config(version='1.22', aliases=['foo', 'bar']) + assert config == {'Aliases': ['foo', 'bar']} + + with pytest.raises(InvalidVersion): + create_endpoint_config(version='1.21', aliases=['foo', 'bar']) + + +class UlimitTest(base.BaseTestCase): + def test_create_host_config_dict_ulimit(self): + ulimit_dct = {'name': 'nofile', 'soft': 8096} + config = create_host_config( + ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION + ) + self.assertIn('Ulimits', config) + self.assertEqual(len(config['Ulimits']), 1) + ulimit_obj = config['Ulimits'][0] + self.assertTrue(isinstance(ulimit_obj, Ulimit)) + self.assertEqual(ulimit_obj.name, ulimit_dct['name']) + self.assertEqual(ulimit_obj.soft, ulimit_dct['soft']) + self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft) + + def test_create_host_config_dict_ulimit_capitals(self): + ulimit_dct = {'Name': 'nofile', 'Soft': 8096, 'Hard': 8096 * 4} + config = create_host_config( + ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION + ) + self.assertIn('Ulimits', config) + self.assertEqual(len(config['Ulimits']), 1) + ulimit_obj = config['Ulimits'][0] + 
self.assertTrue(isinstance(ulimit_obj, Ulimit)) + self.assertEqual(ulimit_obj.name, ulimit_dct['Name']) + self.assertEqual(ulimit_obj.soft, ulimit_dct['Soft']) + self.assertEqual(ulimit_obj.hard, ulimit_dct['Hard']) + self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft) + + def test_create_host_config_obj_ulimit(self): + ulimit_dct = Ulimit(name='nofile', soft=8096) + config = create_host_config( + ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION + ) + self.assertIn('Ulimits', config) + self.assertEqual(len(config['Ulimits']), 1) + ulimit_obj = config['Ulimits'][0] + self.assertTrue(isinstance(ulimit_obj, Ulimit)) + self.assertEqual(ulimit_obj, ulimit_dct) + + def test_ulimit_invalid_type(self): + self.assertRaises(ValueError, lambda: Ulimit(name=None)) + self.assertRaises(ValueError, lambda: Ulimit(name='hello', soft='123')) + self.assertRaises(ValueError, lambda: Ulimit(name='hello', hard='456')) + + +class LogConfigTest(base.BaseTestCase): + def test_create_host_config_dict_logconfig(self): + dct = {'type': LogConfig.types.SYSLOG, 'config': {'key1': 'val1'}} + config = create_host_config( + version=DEFAULT_DOCKER_API_VERSION, log_config=dct + ) + self.assertIn('LogConfig', config) + self.assertTrue(isinstance(config['LogConfig'], LogConfig)) + self.assertEqual(dct['type'], config['LogConfig'].type) + + def test_create_host_config_obj_logconfig(self): + obj = LogConfig(type=LogConfig.types.SYSLOG, config={'key1': 'val1'}) + config = create_host_config( + version=DEFAULT_DOCKER_API_VERSION, log_config=obj + ) + self.assertIn('LogConfig', config) + self.assertTrue(isinstance(config['LogConfig'], LogConfig)) + self.assertEqual(obj, config['LogConfig']) + + def test_logconfig_invalid_config_type(self): + with pytest.raises(ValueError): + LogConfig(type=LogConfig.types.JSON, config='helloworld') + + +class KwargsFromEnvTest(base.BaseTestCase): + def setUp(self): + self.os_environ = os.environ.copy() + + def tearDown(self): + os.environ = self.os_environ + + 
def test_kwargs_from_env_empty(self): + os.environ.update(DOCKER_HOST='', + DOCKER_CERT_PATH='') + os.environ.pop('DOCKER_TLS_VERIFY', None) + + kwargs = kwargs_from_env() + self.assertEqual(None, kwargs.get('base_url')) + self.assertEqual(None, kwargs.get('tls')) + + def test_kwargs_from_env_tls(self): + os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', + DOCKER_CERT_PATH=TEST_CERT_DIR, + DOCKER_TLS_VERIFY='1') + kwargs = kwargs_from_env(assert_hostname=False) + self.assertEqual('https://192.168.59.103:2376', kwargs['base_url']) + self.assertTrue('ca.pem' in kwargs['tls'].ca_cert) + self.assertTrue('cert.pem' in kwargs['tls'].cert[0]) + self.assertTrue('key.pem' in kwargs['tls'].cert[1]) + self.assertEqual(False, kwargs['tls'].assert_hostname) + self.assertTrue(kwargs['tls'].verify) + try: + client = Client(**kwargs) + self.assertEqual(kwargs['base_url'], client.base_url) + self.assertEqual(kwargs['tls'].ca_cert, client.verify) + self.assertEqual(kwargs['tls'].cert, client.cert) + except TypeError as e: + self.fail(e) + + def test_kwargs_from_env_tls_verify_false(self): + os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', + DOCKER_CERT_PATH=TEST_CERT_DIR, + DOCKER_TLS_VERIFY='') + kwargs = kwargs_from_env(assert_hostname=True) + self.assertEqual('https://192.168.59.103:2376', kwargs['base_url']) + self.assertTrue('ca.pem' in kwargs['tls'].ca_cert) + self.assertTrue('cert.pem' in kwargs['tls'].cert[0]) + self.assertTrue('key.pem' in kwargs['tls'].cert[1]) + self.assertEqual(True, kwargs['tls'].assert_hostname) + self.assertEqual(False, kwargs['tls'].verify) + try: + client = Client(**kwargs) + self.assertEqual(kwargs['base_url'], client.base_url) + self.assertEqual(kwargs['tls'].cert, client.cert) + self.assertFalse(kwargs['tls'].verify) + except TypeError as e: + self.fail(e) + + def test_kwargs_from_env_tls_verify_false_no_cert(self): + temp_dir = tempfile.mkdtemp() + cert_dir = os.path.join(temp_dir, '.docker') + shutil.copytree(TEST_CERT_DIR, 
cert_dir) + + os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', + HOME=temp_dir, + DOCKER_TLS_VERIFY='') + os.environ.pop('DOCKER_CERT_PATH', None) + kwargs = kwargs_from_env(assert_hostname=True) + self.assertEqual('tcp://192.168.59.103:2376', kwargs['base_url']) + + def test_kwargs_from_env_no_cert_path(self): + try: + temp_dir = tempfile.mkdtemp() + cert_dir = os.path.join(temp_dir, '.docker') + shutil.copytree(TEST_CERT_DIR, cert_dir) + + os.environ.update(HOME=temp_dir, + DOCKER_CERT_PATH='', + DOCKER_TLS_VERIFY='1') + + kwargs = kwargs_from_env() + self.assertTrue(kwargs['tls'].verify) + self.assertIn(cert_dir, kwargs['tls'].ca_cert) + self.assertIn(cert_dir, kwargs['tls'].cert[0]) + self.assertIn(cert_dir, kwargs['tls'].cert[1]) + finally: + if temp_dir: + shutil.rmtree(temp_dir) + + +class ConverVolumeBindsTest(base.BaseTestCase): + def test_convert_volume_binds_empty(self): + self.assertEqual(convert_volume_binds({}), []) + self.assertEqual(convert_volume_binds([]), []) + + def test_convert_volume_binds_list(self): + data = ['/a:/a:ro', '/b:/c:z'] + self.assertEqual(convert_volume_binds(data), data) + + def test_convert_volume_binds_complete(self): + data = { + '/mnt/vol1': { + 'bind': '/data', + 'mode': 'ro' + } + } + self.assertEqual(convert_volume_binds(data), ['/mnt/vol1:/data:ro']) + + def test_convert_volume_binds_compact(self): + data = { + '/mnt/vol1': '/data' + } + self.assertEqual(convert_volume_binds(data), ['/mnt/vol1:/data:rw']) + + def test_convert_volume_binds_no_mode(self): + data = { + '/mnt/vol1': { + 'bind': '/data' + } + } + self.assertEqual(convert_volume_binds(data), ['/mnt/vol1:/data:rw']) + + def test_convert_volume_binds_unicode_bytes_input(self): + if six.PY2: + expected = [unicode('/mnt/지연:/unicode/박:rw', 'utf-8')] + + data = { + '/mnt/지연': { + 'bind': '/unicode/박', + 'mode': 'rw' + } + } + self.assertEqual( + convert_volume_binds(data), expected + ) + else: + expected = ['/mnt/지연:/unicode/박:rw'] + + data = { + 
bytes('/mnt/지연', 'utf-8'): { + 'bind': bytes('/unicode/박', 'utf-8'), + 'mode': 'rw' + } + } + self.assertEqual( + convert_volume_binds(data), expected + ) + + def test_convert_volume_binds_unicode_unicode_input(self): + if six.PY2: + expected = [unicode('/mnt/지연:/unicode/박:rw', 'utf-8')] + + data = { + unicode('/mnt/지연', 'utf-8'): { + 'bind': unicode('/unicode/박', 'utf-8'), + 'mode': 'rw' + } + } + self.assertEqual( + convert_volume_binds(data), expected + ) + else: + expected = ['/mnt/지연:/unicode/박:rw'] + + data = { + '/mnt/지연': { + 'bind': '/unicode/박', + 'mode': 'rw' + } + } + self.assertEqual( + convert_volume_binds(data), expected + ) + + +class ParseEnvFileTest(base.BaseTestCase): + def generate_tempfile(self, file_content=None): + """ + Generates a temporary file for tests with the content + of 'file_content' and returns the filename. + Don't forget to unlink the file with os.unlink() after. + """ + local_tempfile = tempfile.NamedTemporaryFile(delete=False) + local_tempfile.write(file_content.encode('UTF-8')) + local_tempfile.close() + return local_tempfile.name + + def test_parse_env_file_proper(self): + env_file = self.generate_tempfile( + file_content='USER=jdoe\nPASS=secret') + get_parse_env_file = parse_env_file(env_file) + self.assertEqual(get_parse_env_file, + {'USER': 'jdoe', 'PASS': 'secret'}) + os.unlink(env_file) + + def test_parse_env_file_commented_line(self): + env_file = self.generate_tempfile( + file_content='USER=jdoe\n#PASS=secret') + get_parse_env_file = parse_env_file((env_file)) + self.assertEqual(get_parse_env_file, {'USER': 'jdoe'}) + os.unlink(env_file) + + def test_parse_env_file_invalid_line(self): + env_file = self.generate_tempfile( + file_content='USER jdoe') + self.assertRaises( + DockerException, parse_env_file, env_file) + os.unlink(env_file) + + +class ParseHostTest(base.BaseTestCase): + def test_parse_host(self): + invalid_hosts = [ + '0.0.0.0', + 'tcp://', + 'udp://127.0.0.1', + 'udp://127.0.0.1:2375', + ] + + valid_hosts = 
{ + '0.0.0.1:5555': 'http://0.0.0.1:5555', + ':6666': 'http://127.0.0.1:6666', + 'tcp://:7777': 'http://127.0.0.1:7777', + 'http://:7777': 'http://127.0.0.1:7777', + 'https://kokia.jp:2375': 'https://kokia.jp:2375', + 'unix:///var/run/docker.sock': 'http+unix:///var/run/docker.sock', + 'unix://': 'http+unix://var/run/docker.sock', + 'somehost.net:80/service/swarm': ( + 'http://somehost.net:80/service/swarm' + ), + } + + for host in invalid_hosts: + with pytest.raises(DockerException): + parse_host(host, None) + + for host, expected in valid_hosts.items(): + self.assertEqual(parse_host(host, None), expected, msg=host) + + def test_parse_host_empty_value(self): + unix_socket = 'http+unix://var/run/docker.sock' + tcp_port = 'http://127.0.0.1:2375' + + for val in [None, '']: + for platform in ['darwin', 'linux2', None]: + assert parse_host(val, platform) == unix_socket + + assert parse_host(val, 'win32') == tcp_port + + def test_parse_host_tls(self): + host_value = 'myhost.docker.net:3348' + expected_result = 'https://myhost.docker.net:3348' + self.assertEqual(parse_host(host_value, None, True), expected_result) + + +class ParseRepositoryTagTest(base.BaseTestCase): + sha = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' + + def test_index_image_no_tag(self): + self.assertEqual( + parse_repository_tag("root"), ("root", None) + ) + + def test_index_image_tag(self): + self.assertEqual( + parse_repository_tag("root:tag"), ("root", "tag") + ) + + def test_index_user_image_no_tag(self): + self.assertEqual( + parse_repository_tag("user/repo"), ("user/repo", None) + ) + + def test_index_user_image_tag(self): + self.assertEqual( + parse_repository_tag("user/repo:tag"), ("user/repo", "tag") + ) + + def test_private_reg_image_no_tag(self): + self.assertEqual( + parse_repository_tag("url:5000/repo"), ("url:5000/repo", None) + ) + + def test_private_reg_image_tag(self): + self.assertEqual( + parse_repository_tag("url:5000/repo:tag"), ("url:5000/repo", "tag") + ) 
+ + def test_index_image_sha(self): + self.assertEqual( + parse_repository_tag("root@sha256:{0}".format(self.sha)), + ("root", "sha256:{0}".format(self.sha)) + ) + + def test_private_reg_image_sha(self): + self.assertEqual( + parse_repository_tag("url:5000/repo@sha256:{0}".format(self.sha)), + ("url:5000/repo", "sha256:{0}".format(self.sha)) + ) + + +class ParseDeviceTest(base.BaseTestCase): + def test_dict(self): + devices = parse_devices([{ + 'PathOnHost': '/dev/sda1', + 'PathInContainer': '/dev/mnt1', + 'CgroupPermissions': 'r' + }]) + self.assertEqual(devices[0], { + 'PathOnHost': '/dev/sda1', + 'PathInContainer': '/dev/mnt1', + 'CgroupPermissions': 'r' + }) + + def test_partial_string_definition(self): + devices = parse_devices(['/dev/sda1']) + self.assertEqual(devices[0], { + 'PathOnHost': '/dev/sda1', + 'PathInContainer': '/dev/sda1', + 'CgroupPermissions': 'rwm' + }) + + def test_permissionless_string_definition(self): + devices = parse_devices(['/dev/sda1:/dev/mnt1']) + self.assertEqual(devices[0], { + 'PathOnHost': '/dev/sda1', + 'PathInContainer': '/dev/mnt1', + 'CgroupPermissions': 'rwm' + }) + + def test_full_string_definition(self): + devices = parse_devices(['/dev/sda1:/dev/mnt1:r']) + self.assertEqual(devices[0], { + 'PathOnHost': '/dev/sda1', + 'PathInContainer': '/dev/mnt1', + 'CgroupPermissions': 'r' + }) + + def test_hybrid_list(self): + devices = parse_devices([ + '/dev/sda1:/dev/mnt1:rw', + { + 'PathOnHost': '/dev/sda2', + 'PathInContainer': '/dev/mnt2', + 'CgroupPermissions': 'r' + } + ]) + + self.assertEqual(devices[0], { + 'PathOnHost': '/dev/sda1', + 'PathInContainer': '/dev/mnt1', + 'CgroupPermissions': 'rw' + }) + self.assertEqual(devices[1], { + 'PathOnHost': '/dev/sda2', + 'PathInContainer': '/dev/mnt2', + 'CgroupPermissions': 'r' + }) + + +class ParseBytesTest(base.BaseTestCase): + def test_parse_bytes_valid(self): + self.assertEqual(parse_bytes("512MB"), 536870912) + self.assertEqual(parse_bytes("512M"), 536870912) + 
self.assertEqual(parse_bytes("512m"), 536870912) + + def test_parse_bytes_invalid(self): + self.assertRaises(DockerException, parse_bytes, "512MK") + self.assertRaises(DockerException, parse_bytes, "512L") + self.assertRaises(DockerException, parse_bytes, "127.0.0.1K") + + def test_parse_bytes_float(self): + self.assertRaises(DockerException, parse_bytes, "1.5k") + + def test_parse_bytes_maxint(self): + self.assertEqual( + parse_bytes("{0}k".format(sys.maxsize)), sys.maxsize * 1024 + ) + + +class UtilsTest(base.BaseTestCase): + longMessage = True + + def test_convert_filters(self): + tests = [ + ({'dangling': True}, '{"dangling": ["true"]}'), + ({'dangling': "true"}, '{"dangling": ["true"]}'), + ({'exited': 0}, '{"exited": [0]}'), + ({'exited': [0, 1]}, '{"exited": [0, 1]}'), + ] + + for filters, expected in tests: + self.assertEqual(convert_filters(filters), expected) + + def test_decode_json_header(self): + obj = {'a': 'b', 'c': 1} + data = None + if six.PY3: + data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8')) + else: + data = base64.urlsafe_b64encode(json.dumps(obj)) + decoded_data = decode_json_header(data) + self.assertEqual(obj, decoded_data) + + def test_create_ipam_config(self): + ipam_pool = create_ipam_pool(subnet='192.168.52.0/24', + gateway='192.168.52.254') + + ipam_config = create_ipam_config(pool_configs=[ipam_pool]) + self.assertEqual(ipam_config, { + 'Driver': 'default', + 'Config': [{ + 'Subnet': '192.168.52.0/24', + 'Gateway': '192.168.52.254', + 'AuxiliaryAddresses': None, + 'IPRange': None, + }] + }) + + +class SplitCommandTest(base.BaseTestCase): + + def test_split_command_with_unicode(self): + if six.PY2: + self.assertEqual( + split_command(unicode('echo μμ', 'utf-8')), + ['echo', 'μμ'] + ) + else: + self.assertEqual(split_command('echo μμ'), ['echo', 'μμ']) + + @pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3") + def test_split_command_with_bytes(self): + self.assertEqual(split_command('echo μμ'), 
['echo', 'μμ']) + + +class PortsTest(base.BaseTestCase): + def test_split_port_with_host_ip(self): + internal_port, external_port = split_port("127.0.0.1:1000:2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, [("127.0.0.1", "1000")]) + + def test_split_port_with_protocol(self): + internal_port, external_port = split_port("127.0.0.1:1000:2000/udp") + self.assertEqual(internal_port, ["2000/udp"]) + self.assertEqual(external_port, [("127.0.0.1", "1000")]) + + def test_split_port_with_host_ip_no_port(self): + internal_port, external_port = split_port("127.0.0.1::2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, [("127.0.0.1", None)]) + + def test_split_port_range_with_host_ip_no_port(self): + internal_port, external_port = split_port("127.0.0.1::2000-2001") + self.assertEqual(internal_port, ["2000", "2001"]) + self.assertEqual(external_port, + [("127.0.0.1", None), ("127.0.0.1", None)]) + + def test_split_port_with_host_port(self): + internal_port, external_port = split_port("1000:2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, ["1000"]) + + def test_split_port_range_with_host_port(self): + internal_port, external_port = split_port("1000-1001:2000-2001") + self.assertEqual(internal_port, ["2000", "2001"]) + self.assertEqual(external_port, ["1000", "1001"]) + + def test_split_port_no_host_port(self): + internal_port, external_port = split_port("2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, None) + + def test_split_port_range_no_host_port(self): + internal_port, external_port = split_port("2000-2001") + self.assertEqual(internal_port, ["2000", "2001"]) + self.assertEqual(external_port, None) + + def test_split_port_range_with_protocol(self): + internal_port, external_port = split_port( + "127.0.0.1:1000-1001:2000-2001/udp") + self.assertEqual(internal_port, ["2000/udp", "2001/udp"]) + self.assertEqual(external_port, + 
[("127.0.0.1", "1000"), ("127.0.0.1", "1001")]) + + def test_split_port_invalid(self): + self.assertRaises(ValueError, + lambda: split_port("0.0.0.0:1000:2000:tcp")) + + def test_non_matching_length_port_ranges(self): + self.assertRaises( + ValueError, + lambda: split_port("0.0.0.0:1000-1010:2000-2002/tcp") + ) + + def test_port_and_range_invalid(self): + self.assertRaises(ValueError, + lambda: split_port("0.0.0.0:1000:2000-2002/tcp")) + + def test_port_only_with_colon(self): + self.assertRaises(ValueError, + lambda: split_port(":80")) + + def test_host_only_with_colon(self): + self.assertRaises(ValueError, + lambda: split_port("localhost:")) + + def test_build_port_bindings_with_one_port(self): + port_bindings = build_port_bindings(["127.0.0.1:1000:1000"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + + def test_build_port_bindings_with_matching_internal_ports(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000:1000", "127.0.0.1:2000:1000"]) + self.assertEqual(port_bindings["1000"], + [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]) + + def test_build_port_bindings_with_nonmatching_internal_ports(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")]) + + def test_build_port_bindings_with_port_range(self): + port_bindings = build_port_bindings(["127.0.0.1:1000-1001:1000-1001"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + self.assertEqual(port_bindings["1001"], [("127.0.0.1", "1001")]) + + def test_build_port_bindings_with_matching_internal_port_ranges(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000-1001:1000-1001", "127.0.0.1:2000-2001:1000-1001"]) + self.assertEqual(port_bindings["1000"], + [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]) + self.assertEqual(port_bindings["1001"], + [("127.0.0.1", "1001"), 
("127.0.0.1", "2001")]) + + def test_build_port_bindings_with_nonmatching_internal_port_ranges(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")]) + + +class ExcludePathsTest(base.BaseTestCase): + dirs = [ + 'foo', + 'foo/bar', + 'bar', + ] + + files = [ + 'Dockerfile', + 'Dockerfile.alt', + '.dockerignore', + 'a.py', + 'a.go', + 'b.py', + 'cde.py', + 'foo/a.py', + 'foo/b.py', + 'foo/bar/a.py', + 'bar/a.py', + 'foo/Dockerfile3', + ] + + all_paths = set(dirs + files) + + def setUp(self): + self.base = make_tree(self.dirs, self.files) + + def tearDown(self): + shutil.rmtree(self.base) + + def exclude(self, patterns, dockerfile=None): + return set(exclude_paths(self.base, patterns, dockerfile=dockerfile)) + + def test_no_excludes(self): + assert self.exclude(['']) == self.all_paths + + def test_no_dupes(self): + paths = exclude_paths(self.base, ['!a.py']) + assert sorted(paths) == sorted(set(paths)) + + def test_wildcard_exclude(self): + assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore']) + + def test_exclude_dockerfile_dockerignore(self): + """ + Even if the .dockerignore file explicitly says to exclude + Dockerfile and/or .dockerignore, don't exclude them from + the actual tar file. + """ + assert self.exclude(['Dockerfile', '.dockerignore']) == self.all_paths + + def test_exclude_custom_dockerfile(self): + """ + If we're using a custom Dockerfile, make sure that's not + excluded. 
+ """ + assert self.exclude(['*'], dockerfile='Dockerfile.alt') == \ + set(['Dockerfile.alt', '.dockerignore']) + + assert self.exclude(['*'], dockerfile='foo/Dockerfile3') == \ + set(['foo/Dockerfile3', '.dockerignore']) + + def test_exclude_dockerfile_child(self): + includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3') + assert 'foo/Dockerfile3' in includes + assert 'foo/a.py' not in includes + + def test_single_filename(self): + assert self.exclude(['a.py']) == self.all_paths - set(['a.py']) + + # As odd as it sounds, a filename pattern with a trailing slash on the + # end *will* result in that file being excluded. + def test_single_filename_trailing_slash(self): + assert self.exclude(['a.py/']) == self.all_paths - set(['a.py']) + + def test_wildcard_filename_start(self): + assert self.exclude(['*.py']) == self.all_paths - set([ + 'a.py', 'b.py', 'cde.py', + ]) + + def test_wildcard_with_exception(self): + assert self.exclude(['*.py', '!b.py']) == self.all_paths - set([ + 'a.py', 'cde.py', + ]) + + def test_wildcard_with_wildcard_exception(self): + assert self.exclude(['*.*', '!*.go']) == self.all_paths - set([ + 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt', + ]) + + def test_wildcard_filename_end(self): + assert self.exclude(['a.*']) == self.all_paths - set(['a.py', 'a.go']) + + def test_question_mark(self): + assert self.exclude(['?.py']) == self.all_paths - set(['a.py', 'b.py']) + + def test_single_subdir_single_filename(self): + assert self.exclude(['foo/a.py']) == self.all_paths - set(['foo/a.py']) + + def test_single_subdir_wildcard_filename(self): + assert self.exclude(['foo/*.py']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', + ]) + + def test_wildcard_subdir_single_filename(self): + assert self.exclude(['*/a.py']) == self.all_paths - set([ + 'foo/a.py', 'bar/a.py', + ]) + + def test_wildcard_subdir_wildcard_filename(self): + assert self.exclude(['*/*.py']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', 'bar/a.py', + ]) + + def 
test_directory(self): + assert self.exclude(['foo']) == self.all_paths - set([ + 'foo', 'foo/a.py', 'foo/b.py', + 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' + ]) + + def test_directory_with_trailing_slash(self): + assert self.exclude(['foo']) == self.all_paths - set([ + 'foo', 'foo/a.py', 'foo/b.py', + 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' + ]) + + def test_directory_with_single_exception(self): + assert self.exclude(['foo', '!foo/bar/a.py']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar', + 'foo/Dockerfile3' + ]) + + def test_directory_with_subdir_exception(self): + assert self.exclude(['foo', '!foo/bar']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', 'foo', + 'foo/Dockerfile3' + ]) + + def test_directory_with_wildcard_exception(self): + assert self.exclude(['foo', '!foo/*.py']) == self.all_paths - set([ + 'foo/bar', 'foo/bar/a.py', 'foo', + 'foo/Dockerfile3' + ]) + + def test_subdirectory(self): + assert self.exclude(['foo/bar']) == self.all_paths - set([ + 'foo/bar', 'foo/bar/a.py', + ]) + + +class TarTest(base.Cleanup, base.BaseTestCase): + def test_tar_with_excludes(self): + dirs = [ + 'foo', + 'foo/bar', + 'bar', + ] + + files = [ + 'Dockerfile', + 'Dockerfile.alt', + '.dockerignore', + 'a.py', + 'a.go', + 'b.py', + 'cde.py', + 'foo/a.py', + 'foo/b.py', + 'foo/bar/a.py', + 'bar/a.py', + ] + + exclude = [ + '*.py', + '!b.py', + '!a.go', + 'foo', + 'Dockerfile*', + '.dockerignore', + ] + + expected_names = set([ + 'Dockerfile', + '.dockerignore', + 'a.go', + 'b.py', + 'bar', + 'bar/a.py', + ]) + + base = make_tree(dirs, files) + self.addCleanup(shutil.rmtree, base) + + with tar(base, exclude=exclude) as archive: + tar_data = tarfile.open(fileobj=archive) + assert sorted(tar_data.getnames()) == sorted(expected_names) + + def test_tar_with_empty_directory(self): + base = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base) + for d in ['foo', 'bar']: + os.makedirs(os.path.join(base, d)) + with tar(base) as archive: + 
tar_data = tarfile.open(fileobj=archive) + self.assertEqual(sorted(tar_data.getnames()), ['bar', 'foo']) + + def test_tar_with_file_symlinks(self): + base = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base) + with open(os.path.join(base, 'foo'), 'w') as f: + f.write("content") + os.makedirs(os.path.join(base, 'bar')) + os.symlink('../foo', os.path.join(base, 'bar/foo')) + with tar(base) as archive: + tar_data = tarfile.open(fileobj=archive) + self.assertEqual( + sorted(tar_data.getnames()), ['bar', 'bar/foo', 'foo'] + ) + + def test_tar_with_directory_symlinks(self): + base = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base) + for d in ['foo', 'bar']: + os.makedirs(os.path.join(base, d)) + os.symlink('../foo', os.path.join(base, 'bar/foo')) + with tar(base) as archive: + tar_data = tarfile.open(fileobj=archive) + self.assertEqual( + sorted(tar_data.getnames()), ['bar', 'bar/foo', 'foo'] + ) + + +class SSLAdapterTest(base.BaseTestCase): + def test_only_uses_tls(self): + ssl_context = ssladapter.urllib3.util.ssl_.create_urllib3_context() + + assert ssl_context.options & OP_NO_SSLv3 + assert ssl_context.options & OP_NO_SSLv2 + assert not ssl_context.options & OP_NO_TLSv1 diff --git a/testbed/docker__docker-py/tests/unit/volume_test.py b/testbed/docker__docker-py/tests/unit/volume_test.py new file mode 100644 index 0000000000000000000000000000000000000000..5b1823a4caa06e5497292e4f121aaecb0bfa48be --- /dev/null +++ b/testbed/docker__docker-py/tests/unit/volume_test.py @@ -0,0 +1,96 @@ +import json + +import pytest + +from .. 
import base +from .api_test import DockerClientTest, url_prefix, fake_request + + +class VolumeTest(DockerClientTest): + @base.requires_api_version('1.21') + def test_list_volumes(self): + volumes = self.client.volumes() + self.assertIn('Volumes', volumes) + self.assertEqual(len(volumes['Volumes']), 2) + args = fake_request.call_args + + self.assertEqual(args[0][0], 'GET') + self.assertEqual(args[0][1], url_prefix + 'volumes') + + @base.requires_api_version('1.21') + def test_list_volumes_and_filters(self): + volumes = self.client.volumes(filters={'dangling': True}) + assert 'Volumes' in volumes + assert len(volumes['Volumes']) == 2 + args = fake_request.call_args + + assert args[0][0] == 'GET' + assert args[0][1] == url_prefix + 'volumes' + assert args[1] == {'params': {'filters': '{"dangling": ["true"]}'}, + 'timeout': 60} + + @base.requires_api_version('1.21') + def test_create_volume(self): + name = 'perfectcherryblossom' + result = self.client.create_volume(name) + self.assertIn('Name', result) + self.assertEqual(result['Name'], name) + self.assertIn('Driver', result) + self.assertEqual(result['Driver'], 'local') + args = fake_request.call_args + + self.assertEqual(args[0][0], 'POST') + self.assertEqual(args[0][1], url_prefix + 'volumes/create') + self.assertEqual(json.loads(args[1]['data']), {'Name': name}) + + @base.requires_api_version('1.21') + def test_create_volume_with_driver(self): + name = 'perfectcherryblossom' + driver_name = 'sshfs' + self.client.create_volume(name, driver=driver_name) + args = fake_request.call_args + + self.assertEqual(args[0][0], 'POST') + self.assertEqual(args[0][1], url_prefix + 'volumes/create') + data = json.loads(args[1]['data']) + self.assertIn('Driver', data) + self.assertEqual(data['Driver'], driver_name) + + @base.requires_api_version('1.21') + def test_create_volume_invalid_opts_type(self): + with pytest.raises(TypeError): + self.client.create_volume( + 'perfectcherryblossom', driver_opts='hello=world' + ) + + with 
pytest.raises(TypeError): + self.client.create_volume( + 'perfectcherryblossom', driver_opts=['hello=world'] + ) + + with pytest.raises(TypeError): + self.client.create_volume( + 'perfectcherryblossom', driver_opts='' + ) + + @base.requires_api_version('1.21') + def test_inspect_volume(self): + name = 'perfectcherryblossom' + result = self.client.inspect_volume(name) + self.assertIn('Name', result) + self.assertEqual(result['Name'], name) + self.assertIn('Driver', result) + self.assertEqual(result['Driver'], 'local') + args = fake_request.call_args + + self.assertEqual(args[0][0], 'GET') + self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name)) + + @base.requires_api_version('1.21') + def test_remove_volume(self): + name = 'perfectcherryblossom' + self.client.remove_volume(name) + args = fake_request.call_args + + self.assertEqual(args[0][0], 'DELETE') + self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name)) diff --git a/testbed/docker__docker-py/tox.ini b/testbed/docker__docker-py/tox.ini new file mode 100644 index 0000000000000000000000000000000000000000..40e46fafbb392662995c9d61098410f038162f51 --- /dev/null +++ b/testbed/docker__docker-py/tox.ini @@ -0,0 +1,15 @@ +[tox] +envlist = py26, py27, py33, py34, flake8 +skipsdist=True + +[testenv] +usedevelop=True +commands = + py.test --cov=docker {posargs:tests/unit} +deps = + -r{toxinidir}/test-requirements.txt + -r{toxinidir}/requirements.txt + +[testenv:flake8] +commands = flake8 docker tests +deps = flake8 diff --git a/testbed/dpkp__kafka-python/docs/Makefile b/testbed/dpkp__kafka-python/docs/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..b27cf7742c7620780aa2f269a015c1f3385af3b1 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. 
+SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an 
overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/kafka-python.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/kafka-python.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/kafka-python" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/kafka-python" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 
+ +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/testbed/dpkp__kafka-python/docs/apidoc/BrokerConnection.rst b/testbed/dpkp__kafka-python/docs/apidoc/BrokerConnection.rst new file mode 100644 index 0000000000000000000000000000000000000000..c56cf4271d0852d77a5fa92a33f18d54ef672959 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/apidoc/BrokerConnection.rst @@ -0,0 +1,5 @@ +BrokerConnection +================ + +.. autoclass:: kafka.BrokerConnection + :members: diff --git a/testbed/dpkp__kafka-python/docs/apidoc/ClusterMetadata.rst b/testbed/dpkp__kafka-python/docs/apidoc/ClusterMetadata.rst new file mode 100644 index 0000000000000000000000000000000000000000..4b575b376ce98d69f1ed1efccdc9414942df16ed --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/apidoc/ClusterMetadata.rst @@ -0,0 +1,5 @@ +ClusterMetadata +=========== + +.. 
autoclass:: kafka.cluster.ClusterMetadata + :members: diff --git a/testbed/dpkp__kafka-python/docs/apidoc/KafkaAdminClient.rst b/testbed/dpkp__kafka-python/docs/apidoc/KafkaAdminClient.rst new file mode 100644 index 0000000000000000000000000000000000000000..837b00cabca3da39ac46f76f650a48bdd11d3c53 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/apidoc/KafkaAdminClient.rst @@ -0,0 +1,5 @@ +KafkaAdminClient +=========== + +.. autoclass:: kafka.KafkaAdminClient + :members: diff --git a/testbed/dpkp__kafka-python/docs/apidoc/KafkaClient.rst b/testbed/dpkp__kafka-python/docs/apidoc/KafkaClient.rst new file mode 100644 index 0000000000000000000000000000000000000000..5c9d736a20efafe9931217dd807e2c4e8b8231d1 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/apidoc/KafkaClient.rst @@ -0,0 +1,5 @@ +KafkaClient +=========== + +.. autoclass:: kafka.KafkaClient + :members: diff --git a/testbed/dpkp__kafka-python/docs/apidoc/KafkaConsumer.rst b/testbed/dpkp__kafka-python/docs/apidoc/KafkaConsumer.rst new file mode 100644 index 0000000000000000000000000000000000000000..39062c684a2f606278ddfedff26c7ec79a5f0075 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/apidoc/KafkaConsumer.rst @@ -0,0 +1,5 @@ +KafkaConsumer +============= + +.. autoclass:: kafka.KafkaConsumer + :members: diff --git a/testbed/dpkp__kafka-python/docs/apidoc/KafkaProducer.rst b/testbed/dpkp__kafka-python/docs/apidoc/KafkaProducer.rst new file mode 100644 index 0000000000000000000000000000000000000000..1b71c4114d47d6e0eb27532135d34b454988958d --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/apidoc/KafkaProducer.rst @@ -0,0 +1,5 @@ +KafkaProducer +============= + +.. 
autoclass:: kafka.KafkaProducer + :members: diff --git a/testbed/dpkp__kafka-python/docs/apidoc/modules.rst b/testbed/dpkp__kafka-python/docs/apidoc/modules.rst new file mode 100644 index 0000000000000000000000000000000000000000..066fc6523d4d08dd6d16b359bec1c40f8d0ee694 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/apidoc/modules.rst @@ -0,0 +1,11 @@ +kafka-python API +**************** + +.. toctree:: + + KafkaConsumer + KafkaProducer + KafkaAdminClient + KafkaClient + BrokerConnection + ClusterMetadata diff --git a/testbed/dpkp__kafka-python/docs/changelog.rst b/testbed/dpkp__kafka-python/docs/changelog.rst new file mode 100644 index 0000000000000000000000000000000000000000..446b290218ad153238da27f637b5d32f41d6fa4c --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/changelog.rst @@ -0,0 +1,1388 @@ +Changelog +========= + + +2.0.2 (Sep 29, 2020) +#################### + +Consumer +-------- +* KIP-54: Implement sticky partition assignment strategy (aynroot / PR #2057) +* Fix consumer deadlock when heartbeat thread request timeout (huangcuiyang / PR #2064) + +Compatibility +------------- +* Python 3.8 support (Photonios / PR #2088) + +Cleanups +-------- +* Bump dev requirements (jeffwidman / PR #2129) +* Fix crc32c deprecation warning (crc32c==2.1) (jeffwidman / PR #2128) +* Lint cleanup (jeffwidman / PR #2126) +* Fix initialization order in KafkaClient (pecalleja / PR #2119) +* Allow installing crc32c via extras (mishas / PR #2069) +* Remove unused imports (jameslamb / PR #2046) + +Admin Client +------------ +* Merge _find_coordinator_id methods (jeffwidman / PR #2127) +* Feature: delete consumergroups (swenzel / PR #2040) +* Allow configurable timeouts in admin client check version (sunnyakaxd / PR #2107) +* Enhancement for Kafka Admin Client's "Describe Consumer Group" (Apurva007 / PR #2035) + +Protocol +-------- +* Add support for zstd compression (gabriel-tincu / PR #2021) +* Add protocol support for brokers 1.1.0 - 2.5.0 (gabriel-tincu / PR #2038) +* Add 
ProduceRequest/ProduceResponse v6/v7/v8 (gabriel-tincu / PR #2020) +* Fix parsing NULL header values (kvfi / PR #2024) + +Tests +----- +* Add 2.5.0 to automated CI tests (gabriel-tincu / PR #2038) +* Add 2.1.1 to build_integration (gabriel-tincu / PR #2019) + +Documentation / Logging / Errors +-------------------------------- +* Disable logging during producer object gc (gioele / PR #2043) +* Update example.py; use threading instead of multiprocessing (Mostafa-Elmenbawy / PR #2081) +* Fix typo in exception message (haracejacob / PR #2096) +* Add kafka.structs docstrings (Mostafa-Elmenbawy / PR #2080) +* Fix broken compatibility page link (anuragrana / PR #2045) +* Rename README to README.md (qhzxc0015 / PR #2055) +* Fix docs by adding SASL mention (jeffwidman / #1990) + + +2.0.1 (Feb 19, 2020) +#################### + +Admin Client +------------ +* KAFKA-8962: Use least_loaded_node() for AdminClient.describe_topics() (jeffwidman / PR #2000) +* Fix AdminClient topic error parsing in MetadataResponse (jtribble / PR #1997) + + +2.0.0 (Feb 10, 2020) +#################### + +This release includes breaking changes for any application code that has not +migrated from older Simple-style classes to newer Kafka-style classes. 
+ +Deprecation +----------- +* Remove deprecated SimpleClient, Producer, Consumer, Unittest (jeffwidman / PR #1196) + +Admin Client +------------ +* Use the controller for topic metadata requests (TylerLubeck / PR #1995) +* Implement list_topics, describe_topics, and describe_cluster (TylerLubeck / PR #1993) +* Implement __eq__ and __hash__ for ACL objects (TylerLubeck / PR #1955) +* Fixes KafkaAdminClient returning `IncompatibleBrokerVersion` when passing an `api_version` (ian28223 / PR #1953) +* Admin protocol updates (TylerLubeck / PR #1948) +* Fix describe config for multi-broker clusters (jlandersen / PR #1869) + +Miscellaneous Bugfixes / Improvements +------------------------------------- +* Enable SCRAM-SHA-256 and SCRAM-SHA-512 for sasl (swenzel / PR #1918) +* Fix slots usage and use more slots (carsonip / PR #1987) +* Optionally return OffsetAndMetadata from consumer.committed(tp) (dpkp / PR #1979) +* Reset conn configs on exception in conn.check_version() (dpkp / PR #1977) +* Do not block on sender thread join after timeout in producer.close() (dpkp / PR #1974) +* Implement methods to convert a Struct object to a pythonic object (TylerLubeck / PR #1951) + +Test Infrastructure / Documentation / Maintenance +------------------------------------------------- +* Update 2.4.0 resource files for sasl integration (dpkp) +* Add kafka 2.4.0 to CI testing (vvuibert / PR #1972) +* convert test_admin_integration to pytest (ulrikjohansson / PR #1923) +* xfail test_describe_configs_topic_resource_returns_configs (dpkp / Issue #1929) +* Add crc32c to README and docs (dpkp) +* Improve docs for reconnect_backoff_max_ms (dpkp / PR #1976) +* Fix simple typo: managementment -> management (timgates42 / PR #1966) +* Fix typos (carsonip / PR #1938) +* Fix doc import paths (jeffwidman / PR #1933) +* Update docstring to match conn.py's (dabcoder / PR #1921) +* Do not log topic-specific errors in full metadata fetch (dpkp / PR #1980) +* Raise AssertionError if consumer closed in 
poll() (dpkp / PR #1978) +* Log retriable coordinator NodeNotReady, TooManyInFlightRequests as debug not error (dpkp / PR #1975) +* Remove unused import (jeffwidman) +* Remove some dead code (jeffwidman) +* Fix a benchmark to Use print() function in both Python 2 and Python 3 (cclauss / PR #1983) +* Fix a test to use ==/!= to compare str, bytes, and int literals (cclauss / PR #1984) +* Fix benchmarks to use pyperf (carsonip / PR #1986) +* Remove unused/empty .gitsubmodules file (jeffwidman / PR #1928) +* Remove deprecated `ConnectionError` (jeffwidman / PR #1816) + + +1.4.7 (Sep 30, 2019) +#################### + +This is a minor release focused on KafkaConsumer performance, Admin Client +improvements, and Client concurrency. The KafkaConsumer iterator implementation +has been greatly simplified so that it just wraps consumer.poll(). The prior +implementation will remain available for a few more releases using the optional +KafkaConsumer config: `legacy_iterator=True` . This is expected to improve +consumer throughput substantially and help reduce heartbeat failures / group +rebalancing. 
+ +Client +------ +* Send socket data via non-blocking IO with send buffer (dpkp / PR #1912) +* Rely on socket selector to detect completed connection attempts (dpkp / PR #1909) +* Improve connection lock handling; always use context manager (melor,dpkp / PR #1895) +* Reduce client poll timeout when there are no in-flight requests (dpkp / PR #1823) + +KafkaConsumer +------------- +* Do not use wakeup when sending fetch requests from consumer (dpkp / PR #1911) +* Wrap `consumer.poll()` for KafkaConsumer iteration (dpkp / PR #1902) +* Allow the coordinator to auto-commit on old brokers (justecorruptio / PR #1832) +* Reduce internal client poll timeout for (legacy) consumer iterator interface (dpkp / PR #1824) +* Use dedicated connection for group coordinator (dpkp / PR #1822) +* Change coordinator lock acquisition order (dpkp / PR #1821) +* Make `partitions_for_topic` a read-through cache (Baisang / PR #1781,#1809) +* Fix consumer hanging indefinitely on topic deletion while rebalancing (commanderdishwasher / PR #1782) + +Miscellaneous Bugfixes / Improvements +------------------------------------- +* Fix crc32c avilability on non-intel architectures (ossdev07 / PR #1904) +* Load system default SSL CAs if `ssl_cafile` is not provided (iAnomaly / PR #1883) +* Catch py3 TimeoutError in BrokerConnection send/recv (dpkp / PR #1820) +* Added a function to determine if bootstrap is successfully connected (Wayde2014 / PR #1876) + +Admin Client +------------ +* Add ACL api support to KafkaAdminClient (ulrikjohansson / PR #1833) +* Add `sasl_kerberos_domain_name` config to KafkaAdminClient (jeffwidman / PR #1852) +* Update `security_protocol` config documentation for KafkaAdminClient (cardy31 / PR #1849) +* Break FindCoordinator into request/response methods in KafkaAdminClient (jeffwidman / PR #1871) +* Break consumer operations into request / response methods in KafkaAdminClient (jeffwidman / PR #1845) +* Parallelize calls to `_send_request_to_node()` in KafkaAdminClient 
(davidheitman / PR #1807) + +Test Infrastructure / Documentation / Maintenance +------------------------------------------------- +* Add Kafka 2.3.0 to test matrix and compatibility docs (dpkp / PR #1915) +* Convert remaining `KafkaConsumer` tests to `pytest` (jeffwidman / PR #1886) +* Bump integration tests to 0.10.2.2 and 0.11.0.3 (jeffwidman / #1890) +* Cleanup handling of `KAFKA_VERSION` env var in tests (jeffwidman / PR #1887) +* Minor test cleanup (jeffwidman / PR #1885) +* Use `socket.SOCK_STREAM` in test assertions (iv-m / PR #1879) +* Sanity test for `consumer.topics()` and `consumer.partitions_for_topic()` (Baisang / PR #1829) +* Cleanup seconds conversion in client poll timeout calculation (jeffwidman / PR #1825) +* Remove unused imports (jeffwidman / PR #1808) +* Cleanup python nits in RangePartitionAssignor (jeffwidman / PR #1805) +* Update links to kafka consumer config docs (jeffwidman) +* Fix minor documentation typos (carsonip / PR #1865) +* Remove unused/weird comment line (jeffwidman / PR #1813) +* Update docs for `api_version_auto_timeout_ms` (jeffwidman / PR #1812) + + +1.4.6 (Apr 2, 2019) +################### + +This is a patch release primarily focused on bugs related to concurrency, +SSL connections and testing, and SASL authentication: + +Client Concurrency Issues (Race Conditions / Deadlocks) +------------------------------------------------------- +* Fix race condition in `protocol.send_bytes` (isamaru / PR #1752) +* Do not call `state_change_callback` with lock (dpkp / PR #1775) +* Additional BrokerConnection locks to synchronize protocol/IFR state (dpkp / PR #1768) +* Send pending requests before waiting for responses (dpkp / PR #1762) +* Avoid race condition on `client._conns` in send() (dpkp / PR #1772) +* Hold lock during `client.check_version` (dpkp / PR #1771) + +Producer Wakeup / TimeoutError +------------------------------ +* Dont wakeup during `maybe_refresh_metadata` -- it is only called by poll() (dpkp / PR #1769) +* Dont do 
client wakeup when sending from sender thread (dpkp / PR #1761) + +SSL - Python3.7 Support / Bootstrap Hostname Verification / Testing +------------------------------------------------------------------- +* Wrap SSL sockets after connecting for python3.7 compatibility (dpkp / PR #1754) +* Allow configuration of SSL Ciphers (dpkp / PR #1755) +* Maintain shadow cluster metadata for bootstrapping (dpkp / PR #1753) +* Generate SSL certificates for local testing (dpkp / PR #1756) +* Rename ssl.keystore.location and ssl.truststore.location config files (dpkp) +* Reset reconnect backoff on SSL connection (dpkp / PR #1777) + +SASL - OAuthBearer support / api version bugfix +----------------------------------------------- +* Fix 0.8.2 protocol quick detection / fix SASL version check (dpkp / PR #1763) +* Update sasl configuration docstrings to include supported mechanisms (dpkp) +* Support SASL OAuthBearer Authentication (pt2pham / PR #1750) + +Miscellaneous Bugfixes +---------------------- +* Dont force metadata refresh when closing unneeded bootstrap connections (dpkp / PR #1773) +* Fix possible AttributeError during conn._close_socket (dpkp / PR #1776) +* Return connection state explicitly after close in connect() (dpkp / PR #1778) +* Fix flaky conn tests that use time.time (dpkp / PR #1758) +* Add py to requirements-dev (dpkp) +* Fixups to benchmark scripts for py3 / new KafkaFixture interface (dpkp) + + +1.4.5 (Mar 14, 2019) +#################### + +This release is primarily focused on addressing lock contention +and other coordination issues between the KafkaConsumer and the +background heartbeat thread that was introduced in the 1.4 release. 
+ +Consumer +-------- +* connections_max_idle_ms must be larger than request_timeout_ms (jeffwidman / PR #1688) +* Avoid race condition during close() / join heartbeat thread (dpkp / PR #1735) +* Use last offset from fetch v4 if available to avoid getting stuck in compacted topic (keithks / PR #1724) +* Synchronize puts to KafkaConsumer protocol buffer during async sends (dpkp / PR #1733) +* Improve KafkaConsumer join group / only enable Heartbeat Thread during stable group (dpkp / PR #1695) +* Remove unused `skip_double_compressed_messages` (jeffwidman / PR #1677) +* Fix commit_offsets_async() callback (Faqa / PR #1712) + +Client +------ +* Retry bootstrapping after backoff when necessary (dpkp / PR #1736) +* Recheck connecting nodes sooner when refreshing metadata (dpkp / PR #1737) +* Avoid probing broker versions twice on newer brokers (dpkp / PR #1738) +* Move all network connections and writes to KafkaClient.poll() (dpkp / PR #1729) +* Do not require client lock for read-only operations (dpkp / PR #1730) +* Timeout all unconnected conns (incl SSL) after request_timeout_ms (dpkp / PR #1696) + +Admin Client +------------ +* Fix AttributeError in response topic error codes checking (jeffwidman) +* Fix response error checking in KafkaAdminClient send_to_controller (jeffwidman) +* Fix NotControllerError check (jeffwidman) + +Core/Protocol +------------- +* Fix default protocol parser version / 0.8.2 version probe (dpkp / PR #1740) +* Make NotEnoughReplicasError/NotEnoughReplicasAfterAppendError retriable (le-linh / PR #1722) + +Bugfixes +-------- +* Use copy() in metrics() to avoid thread safety issues (emeric254 / PR #1682) + +Test Infrastructure +------------------- +* Mock dns lookups in test_conn (dpkp / PR #1739) +* Use test.fixtures.version not test.conftest.version to avoid warnings (dpkp / PR #1731) +* Fix test_legacy_correct_metadata_response on x86 arch (stanislavlevin / PR #1718) +* Travis CI: 'sudo' tag is now deprecated in Travis (cclauss / PR #1698) 
+* Use Popen.communicate() instead of Popen.wait() (Baisang / PR #1689)
+
+Compatibility
+-------------
+* Catch thrown OSError by python 3.7 when creating a connection (danjo133 / PR #1694)
+* Update travis test coverage: 2.7, 3.4, 3.7, pypy2.7 (jeffwidman, dpkp / PR #1614)
+* Drop dependency on sphinxcontrib-napoleon (stanislavlevin / PR #1715)
+* Remove unused import from kafka/producer/record_accumulator.py (jeffwidman / PR #1705)
+* Fix SSL connection testing in Python 3.7 (seanthegeek, silentben / PR #1669)
+
+
+1.4.4 (Nov 20, 2018)
+####################
+
+Bugfixes
+--------
+* (Attempt to) Fix deadlock between consumer and heartbeat (zhgjun / dpkp #1628)
+* Fix Metrics dict memory leak (kishorenc #1569)
+
+Client
+------
+* Support Kafka record headers (hnousiainen #1574)
+* Set socket timeout for the write-side of wake socketpair (Fleurer #1577)
+* Add kerberos domain name config for gssapi sasl mechanism handshake (the-sea #1542)
+* Support smaller topic metadata fetch during bootstrap (andyxning #1541)
+* Use TypeError for invalid timeout type (jeffwidman #1636)
+* Break poll if closed (dpkp)
+
+Admin Client
+------------
+* Add KafkaAdminClient class (llamahunter #1540)
+* Fix list_consumer_groups() to query all brokers (jeffwidman #1635)
+* Stop using broker-errors for client-side problems (jeffwidman #1639)
+* Fix send to controller (jeffwidman #1640)
+* Add group coordinator lookup (jeffwidman #1641)
+* Fix describe_groups (jeffwidman #1642)
+* Add list_consumer_group_offsets() (jeffwidman #1643)
+* Remove support for api versions as strings from KafkaAdminClient (jeffwidman #1644)
+* Set a clear default value for `validate_only`/`include_synonyms` (jeffwidman #1645)
+* Bugfix: Always set this_groups_coordinator_id (jeffwidman #1650)
+
+Consumer
+--------
+* Fix linter warning on import of ConsumerRebalanceListener (ben-harack #1591)
+* Remove ConsumerTimeout (emord #1587)
+* Return future from commit_offsets_async() (ekimekim #1560)
+
+Core / Protocol 
+--------------- +* Add protocol structs for {Describe,Create,Delete} Acls (ulrikjohansson #1646/partial) +* Pre-compile pack/unpack function calls (billyevans / jeffwidman #1619) +* Don't use `kafka.common` internally (jeffwidman #1509) +* Be explicit with tuples for %s formatting (jeffwidman #1634) + +Documentation +------------- +* Document connections_max_idle_ms (jeffwidman #1531) +* Fix sphinx url (jeffwidman #1610) +* Update remote urls: snappy, https, etc (jeffwidman #1603) +* Minor cleanup of testing doc (jeffwidman #1613) +* Various docstring / pep8 / code hygiene cleanups (jeffwidman #1647) + +Test Infrastructure +------------------- +* Stop pinning `pylint` (jeffwidman #1611) +* (partial) Migrate from `Unittest` to `pytest` (jeffwidman #1620) +* Minor aesthetic cleanup of partitioner tests (jeffwidman #1618) +* Cleanup fixture imports (jeffwidman #1616) +* Fix typo in test file name (jeffwidman) +* Remove unused ivy_root variable (jeffwidman) +* Add test fixtures for kafka versions 1.0.2 -> 2.0.1 (dpkp) +* Bump travis test for 1.x brokers to 1.1.1 (dpkp) + +Logging / Error Messages +------------------------ +* raising logging level on messages signalling data loss (sibiryakov #1553) +* Stop using deprecated log.warn() (jeffwidman #1615) +* Fix typo in logging message (jeffwidman) + +Compatibility +------------- +* Vendor enum34 (jeffwidman #1604) +* Bump vendored `six` to `1.11.0` (jeffwidman #1602) +* Vendor `six` consistently (jeffwidman #1605) +* Prevent `pylint` import errors on `six.moves` (jeffwidman #1609) + + +1.4.3 (May 26, 2018) +#################### + +Compatibility +------------- +* Fix for python 3.7 support: remove 'async' keyword from SimpleProducer (dpkp #1454) + +Client +------ +* Improve BrokerConnection initialization time (romulorosa #1475) +* Ignore MetadataResponses with empty broker list (dpkp #1506) +* Improve connection handling when bootstrap list is invalid (dpkp #1507) + +Consumer +-------- +* Check for immediate failure when 
looking up coordinator in heartbeat thread (dpkp #1457) + +Core / Protocol +--------------- +* Always acquire client lock before coordinator lock to avoid deadlocks (dpkp #1464) +* Added AlterConfigs and DescribeConfigs apis (StephenSorriaux #1472) +* Fix CreatePartitionsRequest_v0 (StephenSorriaux #1469) +* Add codec validators to record parser and builder for all formats (tvoinarovskyi #1447) +* Fix MemoryRecord bugs re error handling and add test coverage (tvoinarovskyi #1448) +* Force lz4 to disable Kafka-unsupported block linking when encoding (mnito #1476) +* Stop shadowing `ConnectionError` (jeffwidman #1492) + +Documentation +------------- +* Document methods that return None (jeffwidman #1504) +* Minor doc capitalization cleanup (jeffwidman) +* Adds add_callback/add_errback example to docs (Berkodev #1441) +* Fix KafkaConsumer docstring for request_timeout_ms default (dpkp #1459) + +Test Infrastructure +------------------- +* Skip flakey SimpleProducer test (dpkp) +* Fix skipped integration tests if KAFKA_VERSION unset (dpkp #1453) + +Logging / Error Messages +------------------------ +* Stop using deprecated log.warn() (jeffwidman) +* Change levels for some heartbeat thread logging (dpkp #1456) +* Log Heartbeat thread start / close for debugging (dpkp) + + +1.4.2 (Mar 10, 2018) +#################### + +Bugfixes +-------- +* Close leaked selector in version check (dpkp #1425) +* Fix `BrokerConnection.connection_delay()` to return milliseconds (dpkp #1414) +* Use local copies in `Fetcher._fetchable_partitions` to avoid mutation errors (dpkp #1400) +* Fix error var name in `_unpack` (j2gg0s #1403) +* Fix KafkaConsumer compacted offset handling (dpkp #1397) +* Fix byte size estimation with kafka producer (blakeembrey #1393) +* Fix coordinator timeout in consumer poll interface (braedon #1384) + +Client +------ +* Add `BrokerConnection.connect_blocking()` to improve bootstrap to multi-address hostnames (dpkp #1411) +* Short-circuit `BrokerConnection.close()` 
if already disconnected (dpkp #1424) +* Only increase reconnect backoff if all addrinfos have been tried (dpkp #1423) +* Make BrokerConnection .host / .port / .afi immutable to avoid incorrect 'metadata changed' checks (dpkp #1422) +* Connect with sockaddrs to support non-zero ipv6 scope ids (dpkp #1433) +* Check timeout type in KafkaClient constructor (asdaraujo #1293) +* Update string representation of SimpleClient (asdaraujo #1293) +* Do not validate `api_version` against known versions (dpkp #1434) + +Consumer +-------- +* Avoid tight poll loop in consumer when brokers are down (dpkp #1415) +* Validate `max_records` in KafkaConsumer.poll (dpkp #1398) +* KAFKA-5512: Awake heartbeat thread when it is time to poll (dpkp #1439) + +Producer +-------- +* Validate that serializers generate bytes-like (or None) data (dpkp #1420) + +Core / Protocol +--------------- +* Support alternative lz4 package: lz4framed (everpcpc #1395) +* Use hardware accelerated CRC32C function if available (tvoinarovskyi #1389) +* Add Admin CreatePartitions API call (alexef #1386) + +Test Infrastructure +------------------- +* Close KafkaConsumer instances during tests (dpkp #1410) +* Introduce new fixtures to prepare for migration to pytest (asdaraujo #1293) +* Removed pytest-catchlog dependency (asdaraujo #1380) +* Fixes racing condition when message is sent to broker before topic logs are created (asdaraujo #1293) +* Add kafka 1.0.1 release to test fixtures (dpkp #1437) + +Logging / Error Messages +------------------------ +* Re-enable logging during broker version check (dpkp #1430) +* Connection logging cleanups (dpkp #1432) +* Remove old CommitFailed error message from coordinator (dpkp #1436) + + +1.4.1 (Feb 9, 2018) +################### + +Bugfixes +-------- +* Fix consumer poll stuck error when no available partition (ckyoog #1375) +* Increase some integration test timeouts (dpkp #1374) +* Use raw in case string overriden (jeffwidman #1373) +* Fix pending completion IndexError bug 
caused by multiple threads (dpkp #1372) + + +1.4.0 (Feb 6, 2018) +################### + +This is a substantial release. Although there are no known 'showstopper' bugs as of release, +we do recommend you test any planned upgrade to your application prior to running in production. + +Some of the major changes include: + +* We have officially dropped python 2.6 support +* The KafkaConsumer now includes a background thread to handle coordinator heartbeats +* API protocol handling has been separated from networking code into a new class, KafkaProtocol +* Added support for kafka message format v2 +* Refactored DNS lookups during kafka broker connections +* SASL authentication is working (we think) +* Removed several circular references to improve gc on close() + +Thanks to all contributors -- the state of the kafka-python community is strong! + +Detailed changelog are listed below: + +Client +------ +* Fixes for SASL support + + * Refactor SASL/gssapi support (dpkp #1248 #1249 #1257 #1262 #1280) + * Add security layer negotiation to the GSSAPI authentication (asdaraujo #1283) + * Fix overriding sasl_kerberos_service_name in KafkaConsumer / KafkaProducer (natedogs911 #1264) + * Fix typo in _try_authenticate_plain (everpcpc #1333) + * Fix for Python 3 byte string handling in SASL auth (christophelec #1353) + +* Move callback processing from BrokerConnection to KafkaClient (dpkp #1258) +* Use socket timeout of request_timeout_ms to prevent blocking forever on send (dpkp #1281) +* Refactor dns lookup in BrokerConnection (dpkp #1312) +* Read all available socket bytes (dpkp #1332) +* Honor reconnect_backoff in conn.connect() (dpkp #1342) + +Consumer +-------- +* KAFKA-3977: Defer fetch parsing for space efficiency, and to raise exceptions to user (dpkp #1245) +* KAFKA-4034: Avoid unnecessary consumer coordinator lookup (dpkp #1254) +* Handle lookup_coordinator send failures (dpkp #1279) +* KAFKA-3888 Use background thread to process consumer heartbeats (dpkp #1266) +* Improve 
KafkaConsumer cleanup (dpkp #1339) +* Fix coordinator join_future race condition (dpkp #1338) +* Avoid KeyError when filtering fetchable partitions (dpkp #1344) +* Name heartbeat thread with group_id; use backoff when polling (dpkp #1345) +* KAFKA-3949: Avoid race condition when subscription changes during rebalance (dpkp #1364) +* Fix #1239 regression to avoid consuming duplicate compressed messages from mid-batch (dpkp #1367) + +Producer +-------- +* Fix timestamp not passed to RecordMetadata (tvoinarovskyi #1273) +* Raise non-API exceptions (jeffwidman #1316) +* Fix reconnect_backoff_max_ms default config bug in KafkaProducer (YaoC #1352) + +Core / Protocol +--------------- +* Add kafka.protocol.parser.KafkaProtocol w/ receive and send (dpkp #1230) +* Refactor MessageSet and Message into LegacyRecordBatch to later support v2 message format (tvoinarovskyi #1252) +* Add DefaultRecordBatch implementation aka V2 message format parser/builder. (tvoinarovskyi #1185) +* optimize util.crc32 (ofek #1304) +* Raise better struct pack/unpack errors (jeffwidman #1320) +* Add Request/Response structs for kafka broker 1.0.0 (dpkp #1368) + +Bugfixes +-------- +* use python standard max value (lukekingbru #1303) +* changed for to use enumerate() (TheAtomicOption #1301) +* Explicitly check for None rather than falsey (jeffwidman #1269) +* Minor Exception cleanup (jeffwidman #1317) +* Use non-deprecated exception handling (jeffwidman a699f6a) +* Remove assertion with side effect in client.wakeup() (bgedik #1348) +* use absolute imports everywhere (kevinkjt2000 #1362) + +Test Infrastructure +------------------- +* Use 0.11.0.2 kafka broker for integration testing (dpkp #1357 #1244) +* Add a Makefile to help build the project, generate docs, and run tests (tvoinarovskyi #1247) +* Add fixture support for 1.0.0 broker (dpkp #1275) +* Add kafka 1.0.0 to travis integration tests (dpkp #1365) +* Change fixture default host to localhost (asdaraujo #1305) +* Minor test cleanups (dpkp 
#1343) +* Use latest pytest 3.4.0, but drop pytest-sugar due to incompatibility (dpkp #1361) + +Documentation +------------- +* Expand metrics docs (jeffwidman #1243) +* Fix docstring (jeffwidman #1261) +* Added controlled thread shutdown to example.py (TheAtomicOption #1268) +* Add license to wheel (jeffwidman #1286) +* Use correct casing for MB (jeffwidman #1298) + +Logging / Error Messages +------------------------ +* Fix two bugs in printing bytes instance (jeffwidman #1296) + + +1.3.5 (Oct 7, 2017) +#################### + +Bugfixes +-------- +* Fix partition assignment race condition (jeffwidman #1240) +* Fix consumer bug when seeking / resetting to the middle of a compressed messageset (dpkp #1239) +* Fix traceback sent to stderr not logging (dbgasaway #1221) +* Stop using mutable types for default arg values (jeffwidman #1213) +* Remove a few unused imports (jameslamb #1188) + +Client +------ +* Refactor BrokerConnection to use asynchronous receive_bytes pipe (dpkp #1032) + +Consumer +-------- +* Drop unused sleep kwarg to poll (dpkp #1177) +* Enable KafkaConsumer beginning_offsets() and end_offsets() with older broker versions (buptljy #1200) +* Validate consumer subscription topic strings (nikeee #1238) + +Documentation +------------- +* Small fixes to SASL documentation and logging; validate security_protocol (dpkp #1231) +* Various typo and grammar fixes (jeffwidman) + + +1.3.4 (Aug 13, 2017) +#################### + +Bugfixes +-------- +* Avoid multiple connection attempts when refreshing metadata (dpkp #1067) +* Catch socket.errors when sending / recving bytes on wake socketpair (dpkp #1069) +* Deal with brokers that reappear with different IP address (originsmike #1085) +* Fix join-time-max and sync-time-max metrics to use Max() measure function (billyevans #1146) +* Raise AssertionError when decompression unsupported (bts-webber #1159) +* Catch ssl.EOFErrors on Python3.3 so we close the failing conn (Ormod #1162) +* Select on sockets to avoid busy 
polling during bootstrap (dpkp #1175) +* Initialize metadata_snapshot in group coordinator to avoid unnecessary rebalance (dpkp #1174) + +Client +------ +* Timeout idle connections via connections_max_idle_ms (dpkp #1068) +* Warn, dont raise, on DNS lookup failures (dpkp #1091) +* Support exponential backoff for broker reconnections -- KIP-144 (dpkp #1124) +* Add gssapi support (Kerberos) for SASL (Harald-Berghoff #1152) +* Add private map of api key -> min/max versions to BrokerConnection (dpkp #1169) + +Consumer +-------- +* Backoff on unavailable group coordinator retry (dpkp #1125) +* Only change_subscription on pattern subscription when topics change (Artimi #1132) +* Add offsets_for_times, beginning_offsets and end_offsets APIs (tvoinarovskyi #1161) + +Producer +-------- +* Raise KafkaTimeoutError when flush times out (infecto) +* Set producer atexit timeout to 0 to match del (Ormod #1126) + +Core / Protocol +--------------- +* 0.11.0.0 protocol updates (only - no client support yet) (dpkp #1127) +* Make UnknownTopicOrPartitionError retriable error (tvoinarovskyi) + +Test Infrastructure +------------------- +* pylint 1.7.0+ supports python 3.6 and merge py36 into common testenv (jianbin-wei #1095) +* Add kafka 0.10.2.1 into integration testing version (jianbin-wei #1096) +* Disable automated tests for python 2.6 and kafka 0.8.0 and 0.8.1.1 (jianbin-wei #1096) +* Support manual py26 testing; dont advertise 3.3 support (dpkp) +* Add 0.11.0.0 server resources, fix tests for 0.11 brokers (dpkp) +* Use fixture hostname, dont assume localhost (dpkp) +* Add 0.11.0.0 to travis test matrix, remove 0.10.1.1; use scala 2.11 artifacts (dpkp #1176) + +Logging / Error Messages +------------------------ +* Improve error message when expiring batches in KafkaProducer (dpkp #1077) +* Update producer.send docstring -- raises KafkaTimeoutError (infecto) +* Use logging's built-in string interpolation (jeffwidman) +* Fix produce timeout message (melor #1151) +* Fix producer batch 
expiry messages to use seconds (dnwe) + +Documentation +------------- +* Fix typo in KafkaClient docstring (jeffwidman #1054) +* Update README: Prefer python-lz4 over lz4tools (kiri11 #1057) +* Fix poll() hyperlink in KafkaClient (jeffwidman) +* Update RTD links with https / .io (jeffwidman #1074) +* Describe consumer thread-safety (ecksun) +* Fix typo in consumer integration test (jeffwidman) +* Note max_in_flight_requests_per_connection > 1 may change order of messages (tvoinarovskyi #1149) + + +1.3.3 (Mar 14, 2017) +#################### + +Core / Protocol +--------------- +* Derive all api classes from Request / Response base classes (dpkp 1030) +* Prefer python-lz4 if available (dpkp 1024) +* Fix kwarg handing in kafka.protocol.struct.Struct (dpkp 1025) +* Fixed couple of "leaks" when gc is disabled (Mephius 979) +* Added `max_bytes` option and FetchRequest_v3 usage. (Drizzt1991 962) +* CreateTopicsRequest / Response v1 (dpkp 1012) +* Add MetadataRequest_v2 and MetadataResponse_v2 structures for KIP-78 (Drizzt1991 974) +* KIP-88 / KAFKA-3853: OffsetFetch v2 structs (jeffwidman 971) +* DRY-up the MetadataRequest_v1 struct (jeffwidman 966) +* Add JoinGroup v1 structs (jeffwidman 965) +* DRY-up the OffsetCommitResponse Structs (jeffwidman 970) +* DRY-up the OffsetFetch structs (jeffwidman 964) +* time --> timestamp to match Java API (jeffwidman 969) +* Add support for offsetRequestV1 messages (jlafaye 951) +* Add FetchRequest/Response_v3 structs (jeffwidman 943) +* Add CreateTopics / DeleteTopics Structs (jeffwidman 944) + +Test Infrastructure +------------------- +* Add python3.6 to travis test suite, drop python3.3 (exponea 992) +* Update to 0.10.1.1 for integration testing (dpkp 953) +* Update vendored berkerpeksag/selectors34 to ff61b82 (Mephius 979) +* Remove dead code (jeffwidman 967) +* Update pytest fixtures to new yield syntax (jeffwidman 919) + +Consumer +-------- +* Avoid re-encoding message for crc check (dpkp 1027) +* Optionally skip auto-commit 
during consumer.close (dpkp 1031) +* Return copy of consumer subscription set (dpkp 1029) +* Short-circuit group coordinator requests when NodeNotReady (dpkp 995) +* Avoid unknown coordinator after client poll (dpkp 1023) +* No longer configure a default consumer group (dpkp 1016) +* Dont refresh metadata on failed group coordinator request unless needed (dpkp 1006) +* Fail-fast on timeout constraint violations during KafkaConsumer creation (harelba 986) +* Default max_poll_records to Java default of 500 (jeffwidman 947) +* For 0.8.2, only attempt connection to coordinator if least_loaded_node succeeds (dpkp) + +Producer +-------- +* change default timeout of KafkaProducer.close() to threading.TIMEOUT_MAX on py3 (mmyjona 991) + +Client +------ +* Add optional kwarg to ready/is_ready to disable metadata-priority logic (dpkp 1017) +* When closing a broker connection without error, fail in-flight-requests with Cancelled (dpkp 1010) +* Catch socket errors during ssl handshake (dpkp 1007) +* Drop old brokers when rebuilding broker metadata (dpkp 1005) +* Drop bad disconnect test -- just use the mocked-socket test (dpkp 982) +* Add support for Python built without ssl (minagawa-sho 954) +* Do not re-close a disconnected connection (dpkp) +* Drop unused last_failure time from BrokerConnection (dpkp) +* Use connection state functions where possible (dpkp) +* Pass error to BrokerConnection.close() (dpkp) + +Bugfixes +-------- +* Free lz4 decompression context to avoid leak (dpkp 1024) +* Fix sasl reconnect bug: auth future must be reset on close (dpkp 1003) +* Fix raise exception from SubscriptionState.assign_from_subscribed (qntln 960) +* Fix blackout calculation: mark last_attempt time during connection close (dpkp 1008) +* Fix buffer pool reallocation after raising timeout (dpkp 999) + +Logging / Error Messages +------------------------ +* Add client info logging re bootstrap; log connection attempts to balance with close (dpkp) +* Minor additional logging for consumer 
coordinator (dpkp) +* Add more debug-level connection logging (dpkp) +* Do not need str(self) when formatting to %s (dpkp) +* Add new broker response errors (dpkp) +* Small style fixes in kafka.errors (dpkp) +* Include the node id in BrokerConnection logging (dpkp 1009) +* Replace %s with %r in producer debug log message (chekunkov 973) + +Documentation +------------- +* Sphinx documentation updates (jeffwidman 1019) +* Add sphinx formatting to hyperlink methods (jeffwidman 898) +* Fix BrokerConnection api_version docs default (jeffwidman 909) +* PEP-8: Spacing & removed unused imports (jeffwidman 899) +* Move BrokerConnection docstring to class (jeffwidman 968) +* Move docstring so it shows up in Sphinx/RTD (jeffwidman 952) +* Remove non-pip install instructions (jeffwidman 940) +* Spelling and grammar changes (melissacrawford396 923) +* Fix typo: coorelation --> correlation (jeffwidman 929) +* Make SSL warning list the correct Python versions (jeffwidman 924) +* Fixup comment reference to _maybe_connect (dpkp) +* Add ClusterMetadata sphinx documentation (dpkp) + +Legacy Client +------------- +* Add send_list_offset_request for searching offset by timestamp (charsyam 1001) +* Use select to poll sockets for read to reduce CPU usage (jianbin-wei 958) +* Use select.select without instance bounding (adamwen829 949) + + +1.3.2 (Dec 28, 2016) +#################### + +Core +---- +* Add kafka.serializer interfaces (dpkp 912) +* from kafka import ConsumerRebalanceListener, OffsetAndMetadata +* Use 0.10.0.1 for integration tests (dpkp 803) + +Consumer +-------- +* KAFKA-3007: KafkaConsumer max_poll_records (dpkp 831) +* Raise exception if given a non-str topic (ssaamm 824) +* Immediately update metadata for pattern subscription (laz2 915) + +Producer +-------- +* Update Partitioners for use with KafkaProducer (barrotsteindev 827) +* Sort partitions before calling partitioner (ms7s 905) +* Added ssl_password config option to KafkaProducer class (kierkegaard13 830) + +Client 
+------ +* Always check for request timeouts (dpkp 887) +* When hostname lookup is necessary, do every connect (benauthor 812) + +Bugfixes +-------- +* Fix errorcode check when socket.connect_ex raises an exception (guojh 907) +* Fix fetcher bug when processing offset out of range (sibiryakov 860) +* Fix possible request draining in ensure_active_group (dpkp 896) +* Fix metadata refresh handling with 0.10+ brokers when topic list is empty (sibiryakov 867) +* KafkaProducer should set timestamp in Message if provided (Drizzt1991 875) +* Fix murmur2 bug handling python2 bytes that do not ascii encode (dpkp 815) +* Monkeypatch max_in_flight_requests_per_connection when checking broker version (dpkp 834) +* Fix message timestamp_type (qix 828) + +Logging / Error Messages +------------------------ +* Always include an error for logging when the coordinator is marked dead (dpkp 890) +* Only string-ify BrokerResponseError args if provided (dpkp 889) +* Update warning re advertised.listeners / advertised.host.name (jeffwidman 878) +* Fix unrecognized sasl_mechanism error message (sharego 883) + +Documentation +------------- +* Add docstring for max_records (jeffwidman 897) +* Fixup doc references to max_in_flight_requests_per_connection +* Fix typo: passowrd --> password (jeffwidman 901) +* Fix documentation typo 'Defualt' -> 'Default'. 
(rolando 895) +* Added doc for `max_poll_records` option (Drizzt1991 881) +* Remove old design notes from Kafka 8 era (jeffwidman 876) +* Fix documentation typos (jeffwidman 874) +* Fix quota violation exception message (dpkp 809) +* Add comment for round robin partitioner with different subscriptions +* Improve KafkaProducer docstring for retries configuration + + +1.3.1 (Aug 8, 2016) +################### + +Bugfixes +-------- +* Fix AttributeError in BrokerConnectionMetrics after reconnecting + + +1.3.0 (Aug 4, 2016) +################### + +Incompatible Changes +-------------------- +* Delete KafkaConnection class (dpkp 769) +* Rename partition_assignment -> assignment in MemberMetadata for consistency +* Move selectors34 and socketpair to kafka.vendor (dpkp 785) +* Change api_version config to tuple; deprecate str with warning (dpkp 761) +* Rename _DEFAULT_CONFIG -> DEFAULT_CONFIG in KafkaProducer (dpkp 788) + +Improvements +------------ +* Vendor six 1.10.0 to eliminate runtime dependency (dpkp 785) +* Add KafkaProducer and KafkaConsumer.metrics() with instrumentation similar to java client (dpkp 754 / 772 / 794) +* Support Sasl PLAIN authentication (larsjsol PR 779) +* Add checksum and size to RecordMetadata and ConsumerRecord (KAFKA-3196 / 770 / 594) +* Use MetadataRequest v1 for 0.10+ api_version (dpkp 762) +* Fix KafkaConsumer autocommit for 0.8 brokers (dpkp 756 / 706) +* Improve error logging (dpkp 760 / 759) +* Adapt benchmark scripts from https://github.com/mrafayaleem/kafka-jython (dpkp 754) +* Add api_version config to KafkaClient (dpkp 761) +* New Metadata method with_partitions() (dpkp 787) +* Use socket_options configuration to setsockopts(). 
Default TCP_NODELAY (dpkp 783) +* Expose selector type as config option (dpkp 764) +* Drain pending requests to the coordinator before initiating group rejoin (dpkp 798) +* Send combined size and payload bytes to socket to avoid potentially split packets with TCP_NODELAY (dpkp 797) + +Bugfixes +-------- +* Ignore socket.error when checking for protocol out of sync prior to socket close (dpkp 792) +* Fix offset fetch when partitions are manually assigned (KAFKA-3960 / 786) +* Change pickle_method to use python3 special attributes (jpaulodit 777) +* Fix ProduceResponse v2 throttle_time_ms +* Always encode size with MessageSet (#771) +* Avoid buffer overread when compressing messageset in KafkaProducer +* Explicit format string argument indices for python 2.6 compatibility +* Simplify RecordMetadata; short circuit callbacks (#768) +* Fix autocommit when partitions assigned manually (KAFKA-3486 / #767 / #626) +* Handle metadata updates during consumer rebalance (KAFKA-3117 / #766 / #701) +* Add a consumer config option to exclude internal topics (KAFKA-2832 / #765) +* Protect writes to wakeup socket with threading lock (#763 / #709) +* Fetcher spending unnecessary time during metrics recording (KAFKA-3785) +* Always use absolute_import (dpkp) + +Test / Fixtures +--------------- +* Catch select errors while capturing test fixture logs +* Fix consumer group test race condition (dpkp 795) +* Retry fixture failures on a different port (dpkp 796) +* Dump fixture logs on failure + +Documentation +------------- +* Fix misspelling of password (ssaamm 793) +* Document the ssl_password config option (ssaamm 780) +* Fix typo in KafkaConsumer documentation (ssaamm 775) +* Expand consumer.fetcher inline comments +* Update kafka configuration links -> 0.10.0.0 docs +* Fixup metrics_sample_window_ms docstring in consumer + + +1.2.5 (July 15, 2016) +##################### + +Bugfixes +-------- +* Fix bug causing KafkaProducer to double-compress message batches on retry +* Check for 
double-compressed messages in KafkaConsumer, log warning and optionally skip +* Drop recursion in _unpack_message_set; only decompress once + + +1.2.4 (July 8, 2016) +#################### + +Bugfixes +-------- +* Update consumer_timeout_ms docstring - KafkaConsumer raises StopIteration, no longer ConsumerTimeout +* Use explicit subscription state flag to handle seek() during message iteration +* Fix consumer iteration on compacted topics (dpkp PR 752) +* Support ssl_password config when loading cert chains (amckemie PR 750) + + +1.2.3 (July 2, 2016) +#################### + +Patch Improvements +------------------ +* Fix gc error log: avoid AttributeError in _unregister_cleanup (dpkp PR 747) +* Wakeup socket optimizations (dpkp PR 740) +* Assert will be disabled by "python -O" (tyronecai PR 736) +* Randomize order of topics/partitions processed by fetcher to improve balance (dpkp PR 732) +* Allow client.check_version timeout to be set in Producer and Consumer constructors (eastlondoner PR 647) + + +1.2.2 (June 21, 2016) +##################### + +Bugfixes +-------- +* Clarify timeout unit in KafkaProducer close and flush (ms7s PR 734) +* Avoid busy poll during metadata refresh failure with retry_backoff_ms (dpkp PR 733) +* Check_version should scan nodes until version found or timeout (dpkp PR 731) +* Fix bug which could cause least_loaded_node to always return the same unavailable node (dpkp PR 730) +* Fix producer garbage collection with weakref in atexit handler (dpkp PR 728) +* Close client selector to fix fd leak (msmith PR 729) +* Tweak spelling mistake in error const (steve8918 PR 719) +* Rearrange connection tests to separate legacy KafkaConnection + + +1.2.1 (June 1, 2016) +#################### + +Bugfixes +-------- +* Fix regression in MessageSet decoding wrt PartialMessages (#716) +* Catch response decode errors and log details (#715) +* Fix Legacy support url (#712 - JonasGroeger) +* Update sphinx docs re 0.10 broker support + + +1.2.0 (May 24, 2016) 
+#################### + +Support Kafka 0.10 Features +--------------------------- +* Add protocol support for ApiVersionRequest (dpkp PR 678) +* KAFKA-3025: Message v1 -- add timestamp and relative offsets (dpkp PR 693) +* Use Fetch/Produce API v2 for brokers >= 0.10 (uses message format v1) (dpkp PR 694) +* Use standard LZ4 framing for v1 messages / kafka 0.10 (dpkp PR 695) + +Consumers +--------- +* Update SimpleConsumer / legacy protocol to handle compressed messages (paulcavallaro PR 684) + +Producers +--------- +* KAFKA-3388: Fix expiration of batches sitting in the accumulator (dpkp PR 699) +* KAFKA-3197: when max.in.flight.request.per.connection = 1, attempt to guarantee ordering (dpkp PR 698) +* Don't use soon-to-be-reserved keyword await as function name (FutureProduceResult) (dpkp PR 697) + +Clients +------- +* Fix socket leaks in KafkaClient (dpkp PR 696) + +Documentation +------------- + + +Internals +--------- +* Support SSL CRL [requires python 2.7.9+ / 3.4+] (vincentbernat PR 683) +* Use original hostname for SSL checks (vincentbernat PR 682) +* Always pass encoded message bytes to MessageSet.encode() +* Raise ValueError on protocol encode/decode errors +* Supplement socket.gaierror exception in BrokerConnection.connect() (erikbeebe PR 687) +* BrokerConnection check_version: expect 0.9 to fail with CorrelationIdError +* Fix small bug in Sensor (zackdever PR 679) + + +1.1.1 (Apr 26, 2016) +#################### + +Bugfixes +-------- +* Fix throttle_time_ms sensor handling (zackdever PR 667) +* Improve handling of disconnected sockets (EasyPost PR 666 / dpkp) +* Disable standard metadata refresh triggers during bootstrap (dpkp) +* More predictable Future callback/errback exceptions (zackdever PR 670) +* Avoid some exceptions in Coordinator.__del__ (dpkp PR 668) + + +1.1.0 (Apr 25, 2016) +#################### + +Consumers +--------- +* Avoid resending FetchRequests that are pending on internal queue +* Log debug messages when skipping fetched messages due
to offset checks +* KAFKA-3013: Include topic-partition in exception for expired batches +* KAFKA-3318: clean up consumer logging and error messages +* Improve unknown coordinator error handling +* Improve auto-commit error handling when group_id is None +* Add paused() API (zackdever PR 602) +* Add default_offset_commit_callback to KafkaConsumer DEFAULT_CONFIGS + +Producers +--------- + + +Clients +------- +* Support SSL connections +* Use selectors module for non-blocking IO +* Refactor KafkaClient connection management +* Fix AttributeError in __del__ +* SimpleClient: catch errors thrown by _get_leader_for_partition (zackdever PR 606) + +Documentation +------------- +* Fix serializer/deserializer examples in README +* Update max.block.ms docstring +* Remove errant next(consumer) from consumer documentation +* Add producer.flush() to usage docs + +Internals +--------- +* Add initial metrics implementation (zackdever PR 637) +* KAFKA-2136: support Fetch and Produce v1 (throttle_time_ms) +* Use version-indexed lists for request/response protocol structs (dpkp PR 630) +* Split kafka.common into kafka.structs and kafka.errors +* Handle partial socket send() (dpkp PR 611) +* Fix windows support (dpkp PR 603) +* IPv6 support (TimEvens PR 615; Roguelazer PR 642) + + + + +1.0.2 (Mar 14, 2016) +#################### + +Consumers +--------- +* Improve KafkaConsumer Heartbeat handling (dpkp PR 583) +* Fix KafkaConsumer.position bug (stefanth PR 578) +* Raise TypeError when partition is not a TopicPartition (dpkp PR 587) +* KafkaConsumer.poll should sleep to prevent tight-loops (dpkp PR 597) + +Producers +--------- +* Fix producer threading bug that can crash sender (dpkp PR 590) +* Fix bug in producer buffer pool reallocation (dpkp PR 585) +* Remove spurious warnings when closing sync SimpleProducer (twm PR 567) +* Fix FutureProduceResult.await() on python2.6 (dpkp) +* Add optional timeout parameter to KafkaProducer.flush() (dpkp) +* KafkaProducer optimizations (zackdever PR 
598) + +Clients +------- +* Improve error handling in SimpleClient.load_metadata_for_topics (dpkp) +* Improve handling of KafkaClient.least_loaded_node failure (dpkp PR 588) + +Documentation +------------- +* Fix KafkaError import error in docs (shichao-an PR 564) +* Fix serializer / deserializer examples (scribu PR 573) + +Internals +--------- +* Update to Kafka 0.9.0.1 for integration testing +* Fix ifr.future.failure in conn.py (mortenlj PR 566) +* Improve Zookeeper / Kafka Fixture management (dpkp) + + + +1.0.1 (Feb 19, 2016) +#################### + +Consumers +--------- +* Add RangePartitionAssignor (and use as default); add assignor tests (dpkp PR 550) +* Make sure all consumers are in same generation before stopping group test +* Verify node ready before sending offset fetch request from coordinator +* Improve warning when offset fetch request returns unknown topic / partition + +Producers +--------- +* Warn if pending batches failed during flush +* Fix concurrency bug in RecordAccumulator.ready() +* Fix bug in SimpleBufferPool memory condition waiting / timeout +* Support batch_size = 0 in producer buffers (dpkp PR 558) +* Catch duplicate batch.done() calls [e.g., maybe_expire then a response errback] + +Clients +------- + +Documentation +------------- +* Improve kafka.cluster docstrings +* Migrate load_example.py to KafkaProducer / KafkaConsumer + +Internals +--------- +* Don't override system rcvbuf or sndbuf unless configured explicitly (dpkp PR 557) +* Some attributes may not exist in __del__ if we failed assertions +* Break up some circular references and close client wake pipes on __del__ (aisch PR 554) + + +1.0.0 (Feb 15, 2016) +#################### + +This release includes significant code changes. Users of older kafka-python +versions are encouraged to test upgrades before deploying to production as +some interfaces and configuration options have changed. 
+ +Users of SimpleConsumer / SimpleProducer / SimpleClient (formerly KafkaClient) +from prior releases should migrate to KafkaConsumer / KafkaProducer. Low-level +APIs (Simple*) are no longer being actively maintained and will be removed in a +future release. + +For comprehensive API documentation, please see python help() / docstrings, +kafka-python.readthedocs.org, or run 'tox -e docs' from source to build +documentation locally. + +Consumers +--------- +* KafkaConsumer re-written to emulate the new 0.9 kafka consumer (java client) + and support coordinated consumer groups (feature requires >= 0.9.0.0 brokers) + + * Methods no longer available: + + * configure [initialize a new consumer instead] + * set_topic_partitions [use subscribe() or assign()] + * fetch_messages [use poll() or iterator interface] + * get_partition_offsets + * offsets [use committed(partition)] + * task_done [handled internally by auto-commit; or commit offsets manually] + + * Configuration changes (consistent with updated java client): + + * lots of new configuration parameters -- see docs for details + * auto_offset_reset: previously values were 'smallest' or 'largest', now + values are 'earliest' or 'latest' + * fetch_wait_max_ms is now fetch_max_wait_ms + * fetch_message_max_bytes is now max_partition_fetch_bytes + * deserializer_class is now value_deserializer and key_deserializer + * auto_commit_enable is now enable_auto_commit + * auto_commit_interval_messages was removed + * socket_timeout_ms was removed + * refresh_leader_backoff_ms was removed + +* SimpleConsumer and MultiProcessConsumer are now deprecated and will be removed + in a future release. Users are encouraged to migrate to KafkaConsumer. + +Producers +--------- +* new producer class: KafkaProducer. Exposes the same interface as official java client. + Async by default; returned future.get() can be called for synchronous blocking +* SimpleProducer is now deprecated and will be removed in a future release.
Users are + encouraged to migrate to KafkaProducer. + +Clients +------- +* synchronous KafkaClient renamed to SimpleClient. For backwards compatibility, you + will get a SimpleClient via 'from kafka import KafkaClient'. This will change in + a future release. +* All client calls use non-blocking IO under the hood. +* Add probe method check_version() to infer broker versions. + +Documentation +------------- +* Updated README and sphinx documentation to address new classes. +* Docstring improvements to make python help() easier to use. + +Internals +--------- +* Old protocol stack is deprecated. It has been moved to kafka.protocol.legacy + and may be removed in a future release. +* Protocol layer re-written using Type classes, Schemas and Structs (modeled on + the java client). +* Add support for LZ4 compression (including broken framing header checksum). + + +0.9.5 (Dec 6, 2015) +################### + +Consumers +--------- +* Initial support for consumer coordinator: offsets only (toddpalino PR 420) +* Allow blocking until some messages are received in SimpleConsumer (saaros PR 457) +* Support subclass config changes in KafkaConsumer (zackdever PR 446) +* Support retry semantics in MultiProcessConsumer (barricadeio PR 456) +* Support partition_info in MultiProcessConsumer (scrapinghub PR 418) +* Enable seek() to an absolute offset in SimpleConsumer (haosdent PR 412) +* Add KafkaConsumer.close() (ucarion PR 426) + +Producers +--------- +* Catch client.reinit() exceptions in async producer (dpkp) +* Producer.stop() now blocks until async thread completes (dpkp PR 485) +* Catch errors during load_metadata_for_topics in async producer (bschopman PR 467) +* Add compression-level support for codecs that support it (trbs PR 454) +* Fix translation of Java murmur2 code, fix byte encoding for Python 3 (chrischamberlin PR 439) +* Only call stop() on not-stopped producer objects (docker-hub PR 435) +* Allow null payload for deletion feature (scrapinghub PR 409) + +Clients 
+------- +* Use non-blocking io for broker aware requests (ecanzonieri PR 473) +* Use debug logging level for metadata request (ecanzonieri PR 415) +* Catch KafkaUnavailableError in _send_broker_aware_request (mutability PR 436) +* Lower logging level on replica not available and commit (ecanzonieri PR 415) + +Documentation +------------- +* Update docs and links wrt maintainer change (mumrah -> dpkp) + +Internals +--------- +* Add py35 to tox testing +* Update travis config to use container infrastructure +* Add 0.8.2.2 and 0.9.0.0 resources for integration tests; update default official releases +* new pylint disables for pylint 1.5.1 (zackdever PR 481) +* Fix python3 / python2 comments re queue/Queue (dpkp) +* Add Murmur2Partitioner to kafka __all__ imports (dpkp Issue 471) +* Include LICENSE in PyPI sdist (koobs PR 441) + +0.9.4 (June 11, 2015) +##################### + +Consumers +--------- +* Refactor SimpleConsumer internal fetch handling (dpkp PR 399) +* Handle exceptions in SimpleConsumer commit() and reset_partition_offset() (dpkp PR 404) +* Improve FailedPayloadsError handling in KafkaConsumer (dpkp PR 398) +* KafkaConsumer: avoid raising KeyError in task_done (dpkp PR 389) +* MultiProcessConsumer -- support configured partitions list (dpkp PR 380) +* Fix SimpleConsumer leadership change handling (dpkp PR 393) +* Fix SimpleConsumer connection error handling (reAsOn2010 PR 392) +* Improve Consumer handling of 'falsy' partition values (wting PR 342) +* Fix _offsets call error in KafkaConsumer (hellais PR 376) +* Fix str/bytes bug in KafkaConsumer (dpkp PR 365) +* Register atexit handlers for consumer and producer thread/multiprocess cleanup (dpkp PR 360) +* Always fetch commit offsets in base consumer unless group is None (dpkp PR 356) +* Stop consumer threads on delete (dpkp PR 357) +* Deprecate metadata_broker_list in favor of bootstrap_servers in KafkaConsumer (dpkp PR 340) +* Support pass-through parameters in multiprocess consumer (scrapinghub PR 336) 
+* Enable offset commit on SimpleConsumer.seek (ecanzonieri PR 350) +* Improve multiprocess consumer partition distribution (scrapinghub PR 335) +* Ignore messages with offset less than requested (wkiser PR 328) +* Handle OffsetOutOfRange in SimpleConsumer (ecanzonieri PR 296) + +Producers +--------- +* Add Murmur2Partitioner (dpkp PR 378) +* Log error types in SimpleProducer and SimpleConsumer (dpkp PR 405) +* SimpleProducer support configuration of fail_on_error (dpkp PR 396) +* Deprecate KeyedProducer.send() (dpkp PR 379) +* Further improvements to async producer code (dpkp PR 388) +* Add more configuration parameters for async producer (dpkp) +* Deprecate SimpleProducer batch_send=True in favor of async (dpkp) +* Improve async producer error handling and retry logic (vshlapakov PR 331) +* Support message keys in async producer (vshlapakov PR 329) +* Use threading instead of multiprocessing for Async Producer (vshlapakov PR 330) +* Stop threads on __del__ (chmduquesne PR 324) +* Fix leadership failover handling in KeyedProducer (dpkp PR 314) + +KafkaClient +----------- +* Add .topics property for list of known topics (dpkp) +* Fix request / response order guarantee bug in KafkaClient (dpkp PR 403) +* Improve KafkaClient handling of connection failures in _get_conn (dpkp) +* Client clears local metadata cache before updating from server (dpkp PR 367) +* KafkaClient should return a response or error for each request - enable better retry handling (dpkp PR 366) +* Improve str/bytes conversion in KafkaClient and KafkaConsumer (dpkp PR 332) +* Always return sorted partition ids in client.get_partition_ids_for_topic() (dpkp PR 315) + +Documentation +------------- +* Cleanup Usage Documentation +* Improve KafkaConsumer documentation (dpkp PR 341) +* Update consumer documentation (sontek PR 317) +* Add doc configuration for tox (sontek PR 316) +* Switch to .rst doc format (sontek PR 321) +* Fixup google groups link in README (sontek PR 320) +* Automate documentation at 
kafka-python.readthedocs.org + +Internals +--------- +* Switch integration testing from 0.8.2.0 to 0.8.2.1 (dpkp PR 402) +* Fix most flaky tests, improve debug logging, improve fixture handling (dpkp) +* General style cleanups (dpkp PR 394) +* Raise error on duplicate topic-partition payloads in protocol grouping (dpkp) +* Use module-level loggers instead of simply 'kafka' (dpkp) +* Remove pkg_resources check for __version__ at runtime (dpkp PR 387) +* Make external API consistently support python3 strings for topic (kecaps PR 361) +* Fix correlation id overflow (dpkp PR 355) +* Cleanup kafka/common structs (dpkp PR 338) +* Use context managers in gzip_encode / gzip_decode (dpkp PR 337) +* Save failed request as FailedPayloadsError attribute (jobevers PR 302) +* Remove unused kafka.queue (mumrah) + +0.9.3 (Feb 3, 2015) +################### + +* Add coveralls.io support (sontek PR 307) +* Fix python2.6 threading.Event bug in ReentrantTimer (dpkp PR 312) +* Add kafka 0.8.2.0 to travis integration tests (dpkp PR 310) +* Auto-convert topics to utf-8 bytes in Producer (sontek PR 306) +* Fix reference cycle between SimpleConsumer and ReentrantTimer (zhaopengzp PR 309) +* Add Sphinx API docs (wedaly PR 282) +* Handle additional error cases exposed by 0.8.2.0 kafka server (dpkp PR 295) +* Refactor error class management (alexcb PR 289) +* Expose KafkaConsumer in __all__ for easy imports (Dinoshauer PR 286) +* SimpleProducer starts on random partition by default (alexcb PR 288) +* Add keys to compressed messages (meandthewallaby PR 281) +* Add new high-level KafkaConsumer class based on java client api (dpkp PR 234) +* Add KeyedProducer.send_messages api (pubnub PR 277) +* Fix consumer pending() method (jettify PR 276) +* Update low-level demo in README (sunisdown PR 274) +* Include key in KeyedProducer messages (se7entyse7en PR 268) +* Fix SimpleConsumer timeout behavior in get_messages (dpkp PR 238) +* Fix error in consumer.py test against max_buffer_size (rthille/wizzat 
PR 225/242) +* Improve string concat performance on pypy / py3 (dpkp PR 233) +* Reorg directory layout for consumer/producer/partitioners (dpkp/wizzat PR 232/243) +* Add OffsetCommitContext (locationlabs PR 217) +* Metadata Refactor (dpkp PR 223) +* Add Python 3 support (brutasse/wizzat - PR 227) +* Minor cleanups - imports / README / PyPI classifiers (dpkp - PR 221) +* Fix socket test (dpkp - PR 222) +* Fix exception catching bug in test_failover_integration (zever - PR 216) + +0.9.2 (Aug 26, 2014) +#################### + +* Warn users that async producer does not reliably handle failures (dpkp - PR 213) +* Fix spurious ConsumerFetchSizeTooSmall error in consumer (DataDog - PR 136) +* Use PyLint for static error checking (dpkp - PR 208) +* Strictly enforce str message type in producer.send_messages (dpkp - PR 211) +* Add test timers via nose-timer plugin; list 10 slowest timings by default (dpkp) +* Move fetching last known offset logic to a stand alone function (zever - PR 177) +* Improve KafkaConnection and add more tests (dpkp - PR 196) +* Raise TypeError if necessary when encoding strings (mdaniel - PR 204) +* Use Travis-CI to publish tagged releases to pypi (tkuhlman / mumrah) +* Use official binary tarballs for integration tests and parallelize travis tests (dpkp - PR 193) +* Improve new-topic creation handling (wizzat - PR 174) + +0.9.1 (Aug 10, 2014) +#################### + +* Add codec parameter to Producers to enable compression (patricklucas - PR 166) +* Support IPv6 hosts and network (snaury - PR 169) +* Remove dependency on distribute (patricklucas - PR 163) +* Fix connection error timeout and improve tests (wizzat - PR 158) +* SimpleProducer randomization of initial round robin ordering (alexcb - PR 139) +* Fix connection timeout in KafkaClient and KafkaConnection (maciejkula - PR 161) +* Fix seek + commit behavior (wizzat - PR 148) + + +0.9.0 (Mar 21, 2014) +#################### + +* Connection refactor and test fixes (wizzat - PR 134) +* Fix when 
partition has no leader (mrtheb - PR 109) +* Change Producer API to take topic as send argument, not as instance variable (rdiomar - PR 111) +* Substantial refactor and Test Fixing (rdiomar - PR 88) +* Fix Multiprocess Consumer on windows (mahendra - PR 62) +* Improve fault tolerance; add integration tests (jimjh) +* PEP8 / Flakes / Style cleanups (Vetoshkin Nikita; mrtheb - PR 59) +* Setup Travis CI (jimjh - PR 53/54) +* Fix import of BufferUnderflowError (jimjh - PR 49) +* Fix code examples in README (StevenLeRoux - PR 47/48) + +0.8.0 +##### + +* Changing auto_commit to False in [SimpleConsumer](kafka/consumer.py), until 0.8.1 is released offset commits are unsupported +* Adding fetch_size_bytes to SimpleConsumer constructor to allow for user-configurable fetch sizes +* Allow SimpleConsumer to automatically increase the fetch size if a partial message is read and no other messages were read during that fetch request. The increase factor is 1.5 +* Exception classes moved to kafka.common diff --git a/testbed/dpkp__kafka-python/docs/compatibility.rst b/testbed/dpkp__kafka-python/docs/compatibility.rst new file mode 100644 index 0000000000000000000000000000000000000000..b3ad00634461bb51ea6dd40f4a3b909465a562ee --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/compatibility.rst @@ -0,0 +1,21 @@ +Compatibility +------------- + +.. image:: https://img.shields.io/badge/kafka-2.6%2C%202.5%2C%202.4%2C%202.3%2C%202.2%2C%202.1%2C%202.0%2C%201.1%2C%201.0%2C%200.11%2C%200.10%2C%200.9%2C%200.8-brightgreen.svg + :target: https://kafka-python.readthedocs.io/compatibility.html +.. image:: https://img.shields.io/pypi/pyversions/kafka-python.svg + :target: https://pypi.python.org/pypi/kafka-python + +kafka-python is compatible with (and tested against) broker versions 2.6 +through 0.8.0. kafka-python is not compatible with the 0.8.2-beta release. + +Because the kafka server protocol is backwards compatible, kafka-python is +expected to work with newer broker releases as well.
+ +Although kafka-python is tested and expected to work on recent broker versions, +not all features are supported. Specifically, authentication codecs, and +transactional producer/consumer support are not fully implemented. PRs welcome! + +kafka-python is tested on python 2.7, 3.4, 3.7, 3.8 and pypy2.7. + +Builds and tests via Travis-CI. See https://travis-ci.org/dpkp/kafka-python diff --git a/testbed/dpkp__kafka-python/docs/conf.py b/testbed/dpkp__kafka-python/docs/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..efa8d0807a7a9d25a698981f79f1ca7285484e90 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/conf.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- +# +# kafka-python documentation build configuration file, created by +# sphinx-quickstart on Sun Jan 4 12:21:50 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.viewcode', + 'sphinx.ext.napoleon', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. 
+source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'kafka-python' +copyright = u'2016 -- Dana Powers, David Arthur, and Contributors' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +exec(open('../kafka/version.py').read()) + +# The full version, including alpha/beta/rc tags. +release = __version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. 
+#keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'kafka-pythondoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ('index', 'kafka-python.tex', u'kafka-python Documentation', + u'Dana Powers', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'kafka-python', u'kafka-python Documentation', + [u'Dana Powers'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'kafka-python', u'kafka-python Documentation', + u'Dana Powers', 'kafka-python', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+#texinfo_no_detailmenu = False + +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] diff --git a/testbed/dpkp__kafka-python/docs/index.rst b/testbed/dpkp__kafka-python/docs/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..1f2a4ce98ed85e73cb2fa0bf0665d0ec907a51d6 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/index.rst @@ -0,0 +1,169 @@ +kafka-python +############ + +.. image:: https://img.shields.io/badge/kafka-2.6%2C%202.5%2C%202.4%2C%202.3%2C%202.2%2C%202.1%2C%202.0%2C%201.1%2C%201.0%2C%200.11%2C%200.10%2C%200.9%2C%200.8-brightgreen.svg + :target: https://kafka-python.readthedocs.io/compatibility.html +.. image:: https://img.shields.io/pypi/pyversions/kafka-python.svg + :target: https://pypi.python.org/pypi/kafka-python +.. image:: https://coveralls.io/repos/dpkp/kafka-python/badge.svg?branch=master&service=github + :target: https://coveralls.io/github/dpkp/kafka-python?branch=master +.. image:: https://travis-ci.org/dpkp/kafka-python.svg?branch=master + :target: https://travis-ci.org/dpkp/kafka-python +.. image:: https://img.shields.io/badge/license-Apache%202-blue.svg + :target: https://github.com/dpkp/kafka-python/blob/master/LICENSE + +Python client for the Apache Kafka distributed stream processing system. +kafka-python is designed to function much like the official java client, with a +sprinkling of pythonic interfaces (e.g., consumer iterators). + +kafka-python is best used with newer brokers (0.9+), but is backwards-compatible with +older versions (to 0.8.0). Some features will only be enabled on newer brokers. +For example, fully coordinated consumer groups -- i.e., dynamic +partition assignment to multiple consumers in the same group -- requires use of +0.9 kafka brokers. 
Supporting this feature for earlier broker releases would +require writing and maintaining custom leadership election and membership / +health check code (perhaps using zookeeper or consul). For older brokers, you +can achieve something similar by manually assigning different partitions to +each consumer instance with config management tools like chef, ansible, etc. +This approach will work fine, though it does not support rebalancing on +failures. See `Compatibility `_ for more details. + +Please note that the master branch may contain unreleased features. For release +documentation, please see readthedocs and/or python's inline help. + +>>> pip install kafka-python + + +KafkaConsumer +************* + +:class:`~kafka.KafkaConsumer` is a high-level message consumer, intended to +operate as similarly as possible to the official java client. Full support +for coordinated consumer groups requires use of kafka brokers that support the +Group APIs: kafka v0.9+. + +See `KafkaConsumer `_ for API and configuration details. + +The consumer iterator returns ConsumerRecords, which are simple namedtuples +that expose basic message attributes: topic, partition, offset, key, and value: + +>>> from kafka import KafkaConsumer +>>> consumer = KafkaConsumer('my_favorite_topic') +>>> for msg in consumer: +... print (msg) + +>>> # join a consumer group for dynamic partition assignment and offset commits +>>> from kafka import KafkaConsumer +>>> consumer = KafkaConsumer('my_favorite_topic', group_id='my_favorite_group') +>>> for msg in consumer: +... print (msg) + +>>> # manually assign the partition list for the consumer +>>> from kafka import TopicPartition +>>> consumer = KafkaConsumer(bootstrap_servers='localhost:1234') +>>> consumer.assign([TopicPartition('foobar', 2)]) +>>> msg = next(consumer) + +>>> # Deserialize msgpack-encoded values +>>> consumer = KafkaConsumer(value_deserializer=msgpack.loads) +>>> consumer.subscribe(['msgpackfoo']) +>>> for msg in consumer: +... 
assert isinstance(msg.value, dict) + + +KafkaProducer +************* + +:class:`~kafka.KafkaProducer` is a high-level, asynchronous message producer. +The class is intended to operate as similarly as possible to the official java +client. See `KafkaProducer `_ for more details. + +>>> from kafka import KafkaProducer +>>> producer = KafkaProducer(bootstrap_servers='localhost:1234') +>>> for _ in range(100): +... producer.send('foobar', b'some_message_bytes') + +>>> # Block until a single message is sent (or timeout) +>>> future = producer.send('foobar', b'another_message') +>>> result = future.get(timeout=60) + +>>> # Block until all pending messages are at least put on the network +>>> # NOTE: This does not guarantee delivery or success! It is really +>>> # only useful if you configure internal batching using linger_ms +>>> producer.flush() + +>>> # Use a key for hashed-partitioning +>>> producer.send('foobar', key=b'foo', value=b'bar') + +>>> # Serialize json messages +>>> import json +>>> producer = KafkaProducer(value_serializer=lambda v: json.dumps(v).encode('utf-8')) +>>> producer.send('fizzbuzz', {'foo': 'bar'}) + +>>> # Serialize string keys +>>> producer = KafkaProducer(key_serializer=str.encode) +>>> producer.send('flipflap', key='ping', value=b'1234') + +>>> # Compress messages +>>> producer = KafkaProducer(compression_type='gzip') +>>> for i in range(1000): +... producer.send('foobar', b'msg %d' % i) + + +Thread safety +************* + +The KafkaProducer can be used across threads without issue, unlike the +KafkaConsumer which cannot. + +While it is possible to use the KafkaConsumer in a thread-local manner, +multiprocessing is recommended. + + +Compression +*********** + +kafka-python supports the following compression formats: + + - gzip + - LZ4 + - Snappy + - Zstandard (zstd) + +gzip is supported natively, the others require installing additional libraries. +See `Install `_ for more information. 
+ + +Optimized CRC32 Validation +************************** + +Kafka uses CRC32 checksums to validate messages. kafka-python includes a pure +python implementation for compatibility. To improve performance for high-throughput +applications, kafka-python will use `crc32c` for optimized native code if installed. +See `Install `_ for installation instructions and +https://pypi.org/project/crc32c/ for details on the underlying crc32c lib. + + +Protocol +******** + +A secondary goal of kafka-python is to provide an easy-to-use protocol layer +for interacting with kafka brokers via the python repl. This is useful for +testing, probing, and general experimentation. The protocol support is +leveraged to enable a :meth:`~kafka.KafkaClient.check_version()` +method that probes a kafka broker and +attempts to identify which version it is running (0.8.0 to 2.6+). + + +.. toctree:: + :hidden: + :maxdepth: 2 + + Usage Overview + API + install + tests + compatibility + support + license + changelog diff --git a/testbed/dpkp__kafka-python/docs/install.rst b/testbed/dpkp__kafka-python/docs/install.rst new file mode 100644 index 0000000000000000000000000000000000000000..19901ee2966a9b843dba0160f2fe0073016a0326 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/install.rst @@ -0,0 +1,93 @@ +Install +####### + +Install with your favorite package manager + +Latest Release +************** +Pip: + +.. code:: bash + + pip install kafka-python + +Releases are also listed at https://github.com/dpkp/kafka-python/releases + + +Bleeding-Edge +************* + +.. code:: bash + + git clone https://github.com/dpkp/kafka-python + pip install ./kafka-python + + +Optional crc32c install +*********************** +Highly recommended if you are using Kafka 11+ brokers. For those `kafka-python` +uses a new message protocol version, that requires calculation of `crc32c`, +which differs from the `zlib.crc32` hash implementation. By default `kafka-python` +calculates it in pure python, which is quite slow. 
To speed it up we optionally +support https://pypi.python.org/pypi/crc32c package if it's installed. + +.. code:: bash + + pip install 'kafka-python[crc32c]' + + +Optional ZSTD install +******************** + +To enable ZSTD compression/decompression, install python-zstandard: + +>>> pip install 'kafka-python[zstd]' + + +Optional LZ4 install +******************** + +To enable LZ4 compression/decompression, install python-lz4: + +>>> pip install 'kafka-python[lz4]' + + +Optional Snappy install +*********************** + +Install Development Libraries +============================= + +Download and build Snappy from https://google.github.io/snappy/ + +Ubuntu: + +.. code:: bash + + apt-get install libsnappy-dev + +OSX: + +.. code:: bash + + brew install snappy + +From Source: + +.. code:: bash + + wget https://github.com/google/snappy/releases/download/1.1.3/snappy-1.1.3.tar.gz + tar xzvf snappy-1.1.3.tar.gz + cd snappy-1.1.3 + ./configure + make + sudo make install + +Install Python Module +===================== + +Install the `python-snappy` module + +.. code:: bash + + pip install 'kafka-python[snappy]' diff --git a/testbed/dpkp__kafka-python/docs/license.rst b/testbed/dpkp__kafka-python/docs/license.rst new file mode 100644 index 0000000000000000000000000000000000000000..e9d5c9adb35bf6860665fb71e646e1e7617de185 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/license.rst @@ -0,0 +1,10 @@ +License +------- + +.. image:: https://img.shields.io/badge/license-Apache%202-blue.svg + :target: https://github.com/dpkp/kafka-python/blob/master/LICENSE + +Apache License, v2.0. See `LICENSE `_. + +Copyright 2016, Dana Powers, David Arthur, and Contributors +(See `AUTHORS `_). 
diff --git a/testbed/dpkp__kafka-python/docs/make.bat b/testbed/dpkp__kafka-python/docs/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..3332a3a1b80c891814f7795d3b869d06f0b94af7 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/make.bat @@ -0,0 +1,242 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\kafka-python.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\kafka-python.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 
+ goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end diff --git a/testbed/dpkp__kafka-python/docs/requirements.txt b/testbed/dpkp__kafka-python/docs/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..0f095e07491256f8a6890ae485a7041a7ead68b0 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/requirements.txt @@ -0,0 +1,7 @@ +sphinx +sphinx_rtd_theme + +# Install kafka-python in editable mode +# This allows the sphinx autodoc module +# to load the Python modules and extract docstrings. +# -e .. 
diff --git a/testbed/dpkp__kafka-python/docs/support.rst b/testbed/dpkp__kafka-python/docs/support.rst new file mode 100644 index 0000000000000000000000000000000000000000..63d4a86a2c13989700f988bfe55e388cabcbb5ae --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/support.rst @@ -0,0 +1,11 @@ +Support +------- + +For support, see github issues at https://github.com/dpkp/kafka-python + +Limited IRC chat at #kafka-python on freenode (general chat is #apache-kafka). + +For information about Apache Kafka generally, see https://kafka.apache.org/ + +For general discussion of kafka-client design and implementation (not python +specific), see https://groups.google.com/forum/m/#!forum/kafka-clients diff --git a/testbed/dpkp__kafka-python/docs/tests.rst b/testbed/dpkp__kafka-python/docs/tests.rst new file mode 100644 index 0000000000000000000000000000000000000000..561179ca5949f9daead186028b686cfd231aa249 --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/tests.rst @@ -0,0 +1,86 @@ +Tests +===== + +.. image:: https://coveralls.io/repos/dpkp/kafka-python/badge.svg?branch=master&service=github + :target: https://coveralls.io/github/dpkp/kafka-python?branch=master +.. image:: https://travis-ci.org/dpkp/kafka-python.svg?branch=master + :target: https://travis-ci.org/dpkp/kafka-python + +Test environments are managed via tox. The test suite is run via pytest. + +Linting is run via pylint, but is generally skipped on pypy due to pylint +compatibility / performance issues. + +For test coverage details, see https://coveralls.io/github/dpkp/kafka-python + +The test suite includes unit tests that mock network interfaces, as well as +integration tests that setup and teardown kafka broker (and zookeeper) +fixtures for client / consumer / producer testing. + + +Unit tests +------------------ + +To run the tests locally, install tox: + +.. 
code:: bash + + pip install tox + +For more details, see https://tox.readthedocs.io/en/latest/install.html + +Then simply run tox, optionally setting the python environment. +If unset, tox will loop through all environments. + +.. code:: bash + + tox -e py27 + tox -e py35 + + # run protocol tests only + tox -- -v test.test_protocol + + # re-run the last failing test, dropping into pdb + tox -e py27 -- --lf --pdb + + # see available (pytest) options + tox -e py27 -- --help + + +Integration tests +----------------- + +.. code:: bash + + KAFKA_VERSION=0.8.2.2 tox -e py27 + KAFKA_VERSION=1.0.1 tox -e py36 + + +Integration tests start Kafka and Zookeeper fixtures. This requires downloading +kafka server binaries: + +.. code:: bash + + ./build_integration.sh + +By default, this will install the broker versions listed in build_integration.sh's `ALL_RELEASES` +into the servers/ directory. To install a specific version, set the `KAFKA_VERSION` variable: + +.. code:: bash + + KAFKA_VERSION=1.0.1 ./build_integration.sh + +Then to run the tests against a specific Kafka version, simply set the `KAFKA_VERSION` +env variable to the server build you want to use for testing: + +.. code:: bash + + KAFKA_VERSION=1.0.1 tox -e py36 + +To test against the kafka source tree, set KAFKA_VERSION=trunk +[optionally set SCALA_VERSION (defaults to the value set in `build_integration.sh`)] + +.. code:: bash + + SCALA_VERSION=2.12 KAFKA_VERSION=trunk ./build_integration.sh + KAFKA_VERSION=trunk tox -e py36 diff --git a/testbed/dpkp__kafka-python/docs/usage.rst b/testbed/dpkp__kafka-python/docs/usage.rst new file mode 100644 index 0000000000000000000000000000000000000000..1cf1aa4140640eea89cdfae80a2f09bd4e1a070d --- /dev/null +++ b/testbed/dpkp__kafka-python/docs/usage.rst @@ -0,0 +1,110 @@ +Usage +***** + + +KafkaConsumer +============= + +.. 
code:: python + + from kafka import KafkaConsumer + + # To consume latest messages and auto-commit offsets + consumer = KafkaConsumer('my-topic', + group_id='my-group', + bootstrap_servers=['localhost:9092']) + for message in consumer: + # message value and key are raw bytes -- decode if necessary! + # e.g., for unicode: `message.value.decode('utf-8')` + print ("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition, + message.offset, message.key, + message.value)) + + # consume earliest available messages, don't commit offsets + KafkaConsumer(auto_offset_reset='earliest', enable_auto_commit=False) + + # consume json messages + KafkaConsumer(value_deserializer=lambda m: json.loads(m.decode('ascii'))) + + # consume msgpack + KafkaConsumer(value_deserializer=msgpack.unpackb) + + # StopIteration if no message after 1sec + KafkaConsumer(consumer_timeout_ms=1000) + + # Subscribe to a regex topic pattern + consumer = KafkaConsumer() + consumer.subscribe(pattern='^awesome.*') + + # Use multiple consumers in parallel w/ 0.9 kafka brokers + # typically you would run each on a different server / process / CPU + consumer1 = KafkaConsumer('my-topic', + group_id='my-group', + bootstrap_servers='my.server.com') + consumer2 = KafkaConsumer('my-topic', + group_id='my-group', + bootstrap_servers='my.server.com') + + +There are many configuration options for the consumer class. See +:class:`~kafka.KafkaConsumer` API documentation for more details. + + +KafkaProducer +============== + +.. code:: python + + from kafka import KafkaProducer + from kafka.errors import KafkaError + + producer = KafkaProducer(bootstrap_servers=['broker1:1234']) + + # Asynchronous by default + future = producer.send('my-topic', b'raw_bytes') + + # Block for 'synchronous' sends + try: + record_metadata = future.get(timeout=10) + except KafkaError: + # Decide what to do if produce request failed... 
+ log.exception() + pass + + # Successful result returns assigned partition and offset + print (record_metadata.topic) + print (record_metadata.partition) + print (record_metadata.offset) + + # produce keyed messages to enable hashed partitioning + producer.send('my-topic', key=b'foo', value=b'bar') + + # encode objects via msgpack + producer = KafkaProducer(value_serializer=msgpack.dumps) + producer.send('msgpack-topic', {'key': 'value'}) + + # produce json messages + producer = KafkaProducer(value_serializer=lambda m: json.dumps(m).encode('ascii')) + producer.send('json-topic', {'key': 'value'}) + + # produce asynchronously + for _ in range(100): + producer.send('my-topic', b'msg') + + def on_send_success(record_metadata): + print(record_metadata.topic) + print(record_metadata.partition) + print(record_metadata.offset) + + def on_send_error(excp): + log.error('I am an errback', exc_info=excp) + # handle exception + + # produce asynchronously with callbacks + producer.send('my-topic', b'raw_bytes').add_callback(on_send_success).add_errback(on_send_error) + + # block until all async messages are sent + producer.flush() + + # configure multiple retries + producer = KafkaProducer(retries=5) diff --git a/testbed/dpkp__kafka-python/kafka/consumer/__init__.py b/testbed/dpkp__kafka-python/kafka/consumer/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e09bcc1b8e81026841c0a237e1b644d1ea094455 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/consumer/__init__.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import + +from kafka.consumer.group import KafkaConsumer + +__all__ = [ + 'KafkaConsumer' +] diff --git a/testbed/dpkp__kafka-python/kafka/consumer/fetcher.py b/testbed/dpkp__kafka-python/kafka/consumer/fetcher.py new file mode 100644 index 0000000000000000000000000000000000000000..7ff9daf7be498e5c3ecbe720dc5c9b0426e0f514 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/consumer/fetcher.py @@ -0,0 +1,1016 @@ +from __future__ 
"""Fetch machinery for KafkaConsumer: builds FetchRequests, parses responses,
deserializes records, and tracks consume positions per partition."""
from __future__ import absolute_import

import collections
import copy
import logging
import random
import sys
import time

from kafka.vendor import six

import kafka.errors as Errors
from kafka.future import Future
from kafka.metrics.stats import Avg, Count, Max, Rate
from kafka.protocol.fetch import FetchRequest
from kafka.protocol.offset import (
    OffsetRequest, OffsetResetStrategy, UNKNOWN_OFFSET
)
from kafka.record import MemoryRecords
from kafka.serializer import Deserializer
from kafka.structs import TopicPartition, OffsetAndTimestamp

log = logging.getLogger(__name__)


# Isolation levels. Only READ_UNCOMMITTED is used by this Fetcher
# (see Fetcher.__init__ below).
READ_UNCOMMITTED = 0
READ_COMMITTED = 1

# One deserialized message as handed to the user by poll()/iteration.
# sizes are the serialized byte lengths, or -1 when the field was absent.
ConsumerRecord = collections.namedtuple("ConsumerRecord",
    ["topic", "partition", "offset", "timestamp", "timestamp_type",
     "key", "value", "headers", "checksum", "serialized_key_size",
     "serialized_value_size", "serialized_header_size"])


# A raw per-partition FetchResponse payload queued for later parsing.
CompletedFetch = collections.namedtuple("CompletedFetch",
    ["topic_partition", "fetched_offset", "response_version",
     "partition_data", "metric_aggregator"])


class NoOffsetForPartitionError(Errors.KafkaError):
    """No committed offset for a partition and no reset policy configured."""
    pass


class RecordTooLargeError(Errors.KafkaError):
    """A message is too large to ever be returned with the configured fetch size."""
    pass


class Fetcher(six.Iterator):
    """Manages fetching records from brokers for assigned partitions."""

    DEFAULT_CONFIG = {
        'key_deserializer': None,
        'value_deserializer': None,
        'fetch_min_bytes': 1,
        'fetch_max_wait_ms': 500,
        'fetch_max_bytes': 52428800,
        'max_partition_fetch_bytes': 1048576,
        'max_poll_records': sys.maxsize,
        'check_crcs': True,
        'iterator_refetch_records': 1,  # undocumented -- interface may change
        'metric_group_prefix': 'consumer',
        'api_version': (0, 8, 0),
        'retry_backoff_ms': 100
    }

    def __init__(self, client, subscriptions, metrics, **configs):
        """Initialize a Kafka Message Fetcher.

        Keyword Arguments:
            key_deserializer (callable): Any callable that takes a
                raw message key and returns a deserialized key.
            value_deserializer (callable, optional): Any callable that takes a
                raw message value and returns a deserialized value.
            fetch_min_bytes (int): Minimum amount of data the server should
                return for a fetch request, otherwise wait up to
                fetch_max_wait_ms for more data to accumulate. Default: 1.
            fetch_max_wait_ms (int): The maximum amount of time in milliseconds
                the server will block before answering the fetch request if
                there isn't sufficient data to immediately satisfy the
                requirement given by fetch_min_bytes. Default: 500.
            fetch_max_bytes (int): The maximum amount of data the server should
                return for a fetch request. This is not an absolute maximum, if
                the first message in the first non-empty partition of the fetch
                is larger than this value, the message will still be returned
                to ensure that the consumer can make progress. NOTE: consumer
                performs fetches to multiple brokers in parallel so memory
                usage will depend on the number of brokers containing
                partitions for the topic.
                Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 MB).
            max_partition_fetch_bytes (int): The maximum amount of data
                per-partition the server will return. The maximum total memory
                used for a request = #partitions * max_partition_fetch_bytes.
                This size must be at least as large as the maximum message size
                the server allows or else it is possible for the producer to
                send messages larger than the consumer can fetch. If that
                happens, the consumer can get stuck trying to fetch a large
                message on a certain partition. Default: 1048576.
            check_crcs (bool): Automatically check the CRC32 of the records
                consumed. This ensures no on-the-wire or on-disk corruption to
                the messages occurred. This check adds some overhead, so it may
                be disabled in cases seeking extreme performance.
                Default: True
        """
        # Copy defaults, then overlay only the keys we recognize; unknown
        # configs passed by KafkaConsumer are deliberately ignored here.
        self.config = copy.copy(self.DEFAULT_CONFIG)
        for key in self.config:
            if key in configs:
                self.config[key] = configs[key]

        self._client = client
        self._subscriptions = subscriptions
        self._completed_fetches = collections.deque()  # Unparsed responses
        self._next_partition_records = None  # Holds a single PartitionRecords until fully consumed
        self._iterator = None
        self._fetch_futures = collections.deque()
        self._sensors = FetchManagerMetrics(metrics, self.config['metric_group_prefix'])
        self._isolation_level = READ_UNCOMMITTED

    def send_fetches(self):
        """Send FetchRequests for all assigned partitions that do not already have
        an in-flight fetch or pending fetch data.

        Returns:
            List of Futures: each future resolves to a FetchResponse
        """
        futures = []
        for node_id, request in six.iteritems(self._create_fetch_requests()):
            # Only send to nodes with a ready connection; others are retried
            # on a later send_fetches() call.
            if self._client.ready(node_id):
                log.debug("Sending FetchRequest to node %s", node_id)
                future = self._client.send(node_id, request, wakeup=False)
                future.add_callback(self._handle_fetch_response, request, time.time())
                future.add_errback(log.error, 'Fetch to node %s failed: %s', node_id)
                futures.append(future)
        self._fetch_futures.extend(futures)
        self._clean_done_fetch_futures()
        return futures

    def reset_offsets_if_needed(self, partitions):
        """Lookup and set offsets for any partitions which are awaiting an
        explicit reset.

        Arguments:
            partitions (set of TopicPartitions): the partitions to reset
        """
        for tp in partitions:
            # TODO: If there are several offsets to reset, we could submit offset requests in parallel
            if self._subscriptions.is_assigned(tp) and self._subscriptions.is_offset_reset_needed(tp):
                self._reset_offset(tp)
def _clean_done_fetch_futures(self):
    # Drop finished futures from the left end only; the deque is in send
    # order, so we stop at the first future still in flight.
    while True:
        if not self._fetch_futures:
            break
        if not self._fetch_futures[0].is_done:
            break
        self._fetch_futures.popleft()

def in_flight_fetches(self):
    """Return True if there are any unprocessed FetchRequests in flight."""
    self._clean_done_fetch_futures()
    return bool(self._fetch_futures)

def update_fetch_positions(self, partitions):
    """Update the fetch positions for the provided partitions.

    Arguments:
        partitions (list of TopicPartitions): partitions to update

    Raises:
        NoOffsetForPartitionError: if no offset is stored for a given
            partition and no reset policy is available
    """
    # reset the fetch position to the committed position
    for tp in partitions:
        if not self._subscriptions.is_assigned(tp):
            log.warning("partition %s is not assigned - skipping offset"
                        " update", tp)
            continue
        elif self._subscriptions.is_fetchable(tp):
            # A fetchable partition already has a valid position.
            log.warning("partition %s is still fetchable -- skipping offset"
                        " update", tp)
            continue

        if self._subscriptions.is_offset_reset_needed(tp):
            self._reset_offset(tp)
        elif self._subscriptions.assignment[tp].committed is None:
            # there's no committed position, so we need to reset with the
            # default strategy
            self._subscriptions.need_offset_reset(tp)
            self._reset_offset(tp)
        else:
            committed = self._subscriptions.assignment[tp].committed.offset
            log.debug("Resetting offset for partition %s to the committed"
                      " offset %s", tp, committed)
            self._subscriptions.seek(tp, committed)

def get_offsets_by_times(self, timestamps, timeout_ms):
    """Look up offsets by timestamp; missing partitions map to None,
    found ones to OffsetAndTimestamp."""
    offsets = self._retrieve_offsets(timestamps, timeout_ms)
    for tp in timestamps:
        if tp not in offsets:
            offsets[tp] = None
        else:
            offset, timestamp = offsets[tp]
            offsets[tp] = OffsetAndTimestamp(offset, timestamp)
    return offsets

def beginning_offsets(self, partitions, timeout_ms):
    """Return the earliest available offset for each partition."""
    return self.beginning_or_end_offset(
        partitions, OffsetResetStrategy.EARLIEST, timeout_ms)

def end_offsets(self, partitions, timeout_ms):
    """Return the latest offset for each partition."""
    return self.beginning_or_end_offset(
        partitions, OffsetResetStrategy.LATEST, timeout_ms)

def beginning_or_end_offset(self, partitions, timestamp, timeout_ms):
    # timestamp here is the sentinel EARLIEST (-2) or LATEST (-1),
    # applied uniformly to every requested partition.
    timestamps = dict([(tp, timestamp) for tp in partitions])
    offsets = self._retrieve_offsets(timestamps, timeout_ms)
    for tp in timestamps:
        # only the offset is wanted; drop the paired timestamp
        offsets[tp] = offsets[tp][0]
    return offsets

def _reset_offset(self, partition):
    """Reset offsets for the given partition using the offset reset strategy.

    Arguments:
        partition (TopicPartition): the partition that needs reset offset

    Raises:
        NoOffsetForPartitionError: if no offset reset strategy is defined
    """
    timestamp = self._subscriptions.assignment[partition].reset_strategy
    if timestamp is OffsetResetStrategy.EARLIEST:
        strategy = 'earliest'
    elif timestamp is OffsetResetStrategy.LATEST:
        strategy = 'latest'
    else:
        raise NoOffsetForPartitionError(partition)

    log.debug("Resetting offset for partition %s to %s offset.",
              partition, strategy)
    # No timeout here: blocks until the offset is retrieved or a
    # non-retriable error occurs (default timeout_ms is infinity).
    offsets = self._retrieve_offsets({partition: timestamp})

    if partition in offsets:
        offset = offsets[partition][0]

        # we might lose the assignment while fetching the offset,
        # so check it is still active
        if self._subscriptions.is_assigned(partition):
            self._subscriptions.seek(partition, offset)
    else:
        log.debug("Could not find offset for partition %s since it is probably deleted" % (partition,))

def _retrieve_offsets(self, timestamps, timeout_ms=float("inf")):
    """Fetch offset for each partition passed in ``timestamps`` map.

    Blocks until offsets are obtained, a non-retriable exception is raised
    or ``timeout_ms`` passed.

    Arguments:
        timestamps: {TopicPartition: int} dict with timestamps to fetch
            offsets by. -1 for the latest available, -2 for the earliest
            available. Otherwise timestamp is treated as epoch milliseconds.

    Returns:
        {TopicPartition: (int, int)}: Mapping of partition to
            retrieved offset and timestamp. If offset does not exist for
            the provided timestamp, that partition will be missing from
            this mapping.
    """
    if not timestamps:
        return {}

    start_time = time.time()
    remaining_ms = timeout_ms
    # copy so we can drop deleted partitions without mutating caller's dict
    timestamps = copy.copy(timestamps)
    while remaining_ms > 0:
        if not timestamps:
            return {}

        future = self._send_offset_requests(timestamps)
        self._client.poll(future=future, timeout_ms=remaining_ms)

        if future.succeeded():
            return future.value
        if not future.retriable():
            raise future.exception  # pylint: disable-msg=raising-bad-type

        elapsed_ms = (time.time() - start_time) * 1000
        remaining_ms = timeout_ms - elapsed_ms
        if remaining_ms < 0:
            break

        if future.exception.invalid_metadata:
            refresh_future = self._client.cluster.request_update()
            self._client.poll(future=refresh_future, timeout_ms=remaining_ms)

            # Issue #1780
            # Recheck partition existence after a successful metadata refresh
            if refresh_future.succeeded() and isinstance(future.exception, Errors.StaleMetadata):
                log.debug("Stale metadata was raised, and we now have an updated metadata. Rechecking partition existence")
                unknown_partition = future.exception.args[0]  # TopicPartition from StaleMetadata
                if self._client.cluster.leader_for_partition(unknown_partition) is None:
                    log.debug("Removed partition %s from offsets retrieval" % (unknown_partition, ))
                    timestamps.pop(unknown_partition)
        else:
            # retriable but not metadata-related: back off before retrying
            time.sleep(self.config['retry_backoff_ms'] / 1000.0)

        elapsed_ms = (time.time() - start_time) * 1000
        remaining_ms = timeout_ms - elapsed_ms

    raise Errors.KafkaTimeoutError(
        "Failed to get offsets by timestamps in %s ms" % (timeout_ms,))
def fetched_records(self, max_records=None, update_offsets=True):
    """Returns previously fetched records and updates consumed offsets.

    Arguments:
        max_records (int): Maximum number of records returned. Defaults
            to max_poll_records configuration.

    Raises:
        OffsetOutOfRangeError: if no subscription offset_reset_strategy
        CorruptRecordException: if message crc validation fails (check_crcs
            must be set to True)
        RecordTooLargeError: if a message is larger than the currently
            configured max_partition_fetch_bytes
        TopicAuthorizationError: if consumer is not authorized to fetch
            messages from the topic

    Returns: (records (dict), partial (bool))
        records: {TopicPartition: [messages]}
        partial: True if records returned did not fully drain any pending
            partition requests. This may be useful for choosing when to
            pipeline additional fetch requests.
    """
    if max_records is None:
        max_records = self.config['max_poll_records']
    assert max_records > 0

    drained = collections.defaultdict(list)
    records_remaining = max_records

    while records_remaining > 0:
        if not self._next_partition_records:
            # parse the next queued raw response; may yield None
            # (e.g. partition no longer fetchable), in which case we loop
            if not self._completed_fetches:
                break
            completion = self._completed_fetches.popleft()
            self._next_partition_records = self._parse_fetched_data(completion)
        else:
            records_remaining -= self._append(drained,
                                              self._next_partition_records,
                                              records_remaining,
                                              update_offsets)
    return dict(drained), bool(self._completed_fetches)

def _append(self, drained, part, max_records, update_offsets):
    # Move up to max_records messages from `part` into `drained`,
    # advancing the subscription position if update_offsets is True.
    # Returns the number of records appended (0 if part was discarded).
    if not part:
        return 0

    tp = part.topic_partition
    fetch_offset = part.fetch_offset
    if not self._subscriptions.is_assigned(tp):
        # this can happen when a rebalance happened before
        # fetched records are returned to the consumer's poll call
        log.debug("Not returning fetched records for partition %s"
                  " since it is no longer assigned", tp)
    else:
        # note that the position should always be available
        # as long as the partition is still assigned
        position = self._subscriptions.assignment[tp].position
        if not self._subscriptions.is_fetchable(tp):
            # this can happen when a partition is paused before
            # fetched records are returned to the consumer's poll call
            log.debug("Not returning fetched records for assigned partition"
                      " %s since it is no longer fetchable", tp)

        elif fetch_offset == position:
            # we are ensured to have at least one record since we already checked for emptiness
            part_records = part.take(max_records)
            next_offset = part_records[-1].offset + 1

            log.log(0, "Returning fetched records at offset %d for assigned"
                       " partition %s and update position to %s", position,
                    tp, next_offset)

            for record in part_records:
                drained[tp].append(record)

            if update_offsets:
                self._subscriptions.assignment[tp].position = next_offset
            return len(part_records)

        else:
            # these records aren't next in line based on the last consumed
            # position, ignore them they must be from an obsolete request
            log.debug("Ignoring fetched records for %s at offset %s since"
                      " the current position is %d", tp, part.fetch_offset,
                      position)

    # unassigned / unfetchable / stale: drop the buffered records entirely
    part.discard()
    return 0

def _message_generator(self):
    """Iterate over fetched_records"""
    while self._next_partition_records or self._completed_fetches:

        if not self._next_partition_records:
            completion = self._completed_fetches.popleft()
            self._next_partition_records = self._parse_fetched_data(completion)
            continue

        # Send additional FetchRequests when the internal queue is low
        # this should enable moderate pipelining
        if len(self._completed_fetches) <= self.config['iterator_refetch_records']:
            self.send_fetches()

        tp = self._next_partition_records.topic_partition

        # We can ignore any prior signal to drop pending message sets
        # because we are starting from a fresh one where fetch_offset == position
        # i.e., the user seek()'d to this position
        self._subscriptions.assignment[tp].drop_pending_message_set = False

        for msg in self._next_partition_records.take():

            # Because we are in a generator, it is possible for
            # subscription state to change between yield calls
            # so we need to re-check on each loop
            # this should catch assignment changes, pauses
            # and resets via seek_to_beginning / seek_to_end
            if not self._subscriptions.is_fetchable(tp):
                log.debug("Not returning fetched records for partition %s"
                          " since it is no longer fetchable", tp)
                self._next_partition_records = None
                break

            # If there is a seek during message iteration,
            # we should stop unpacking this message set and
            # wait for a new fetch response that aligns with the
            # new seek position
            elif self._subscriptions.assignment[tp].drop_pending_message_set:
                log.debug("Skipping remainder of message set for partition %s", tp)
                self._subscriptions.assignment[tp].drop_pending_message_set = False
                self._next_partition_records = None
                break

            # Compressed messagesets may include earlier messages
            elif msg.offset < self._subscriptions.assignment[tp].position:
                log.debug("Skipping message offset: %s (expecting %s)",
                          msg.offset,
                          self._subscriptions.assignment[tp].position)
                continue

            self._subscriptions.assignment[tp].position = msg.offset + 1
            yield msg

        self._next_partition_records = None
self._next_partition_records = None + break + + # Compressed messagesets may include earlier messages + elif msg.offset < self._subscriptions.assignment[tp].position: + log.debug("Skipping message offset: %s (expecting %s)", + msg.offset, + self._subscriptions.assignment[tp].position) + continue + + self._subscriptions.assignment[tp].position = msg.offset + 1 + yield msg + + self._next_partition_records = None + + def _unpack_message_set(self, tp, records): + try: + batch = records.next_batch() + while batch is not None: + + # LegacyRecordBatch cannot access either base_offset or last_offset_delta + try: + self._subscriptions.assignment[tp].last_offset_from_message_batch = batch.base_offset + \ + batch.last_offset_delta + except AttributeError: + pass + + for record in batch: + key_size = len(record.key) if record.key is not None else -1 + value_size = len(record.value) if record.value is not None else -1 + key = self._deserialize( + self.config['key_deserializer'], + tp.topic, record.key) + value = self._deserialize( + self.config['value_deserializer'], + tp.topic, record.value) + headers = record.headers + header_size = sum( + len(h_key.encode("utf-8")) + (len(h_val) if h_val is not None else 0) for h_key, h_val in + headers) if headers else -1 + yield ConsumerRecord( + tp.topic, tp.partition, record.offset, record.timestamp, + record.timestamp_type, key, value, headers, record.checksum, + key_size, value_size, header_size) + + batch = records.next_batch() + + # If unpacking raises StopIteration, it is erroneously + # caught by the generator. We want all exceptions to be raised + # back to the user. 
See Issue 545 + except StopIteration as e: + log.exception('StopIteration raised unpacking messageset') + raise RuntimeError('StopIteration raised unpacking messageset') + + def __iter__(self): # pylint: disable=non-iterator-returned + return self + + def __next__(self): + if not self._iterator: + self._iterator = self._message_generator() + try: + return next(self._iterator) + except StopIteration: + self._iterator = None + raise + + def _deserialize(self, f, topic, bytes_): + if not f: + return bytes_ + if isinstance(f, Deserializer): + return f.deserialize(topic, bytes_) + return f(bytes_) + + def _send_offset_requests(self, timestamps): + """Fetch offsets for each partition in timestamps dict. This may send + request to multiple nodes, based on who is Leader for partition. + + Arguments: + timestamps (dict): {TopicPartition: int} mapping of fetching + timestamps. + + Returns: + Future: resolves to a mapping of retrieved offsets + """ + timestamps_by_node = collections.defaultdict(dict) + for partition, timestamp in six.iteritems(timestamps): + node_id = self._client.cluster.leader_for_partition(partition) + if node_id is None: + self._client.add_topic(partition.topic) + log.debug("Partition %s is unknown for fetching offset," + " wait for metadata refresh", partition) + return Future().failure(Errors.StaleMetadata(partition)) + elif node_id == -1: + log.debug("Leader for partition %s unavailable for fetching " + "offset, wait for metadata refresh", partition) + return Future().failure( + Errors.LeaderNotAvailableError(partition)) + else: + timestamps_by_node[node_id][partition] = timestamp + + # Aggregate results until we have all + list_offsets_future = Future() + responses = [] + node_count = len(timestamps_by_node) + + def on_success(value): + responses.append(value) + if len(responses) == node_count: + offsets = {} + for r in responses: + offsets.update(r) + list_offsets_future.success(offsets) + + def on_fail(err): + if not list_offsets_future.is_done: + 
list_offsets_future.failure(err) + + for node_id, timestamps in six.iteritems(timestamps_by_node): + _f = self._send_offset_request(node_id, timestamps) + _f.add_callback(on_success) + _f.add_errback(on_fail) + return list_offsets_future + + def _send_offset_request(self, node_id, timestamps): + by_topic = collections.defaultdict(list) + for tp, timestamp in six.iteritems(timestamps): + if self.config['api_version'] >= (0, 10, 1): + data = (tp.partition, timestamp) + else: + data = (tp.partition, timestamp, 1) + by_topic[tp.topic].append(data) + + if self.config['api_version'] >= (0, 10, 1): + request = OffsetRequest[1](-1, list(six.iteritems(by_topic))) + else: + request = OffsetRequest[0](-1, list(six.iteritems(by_topic))) + + # Client returns a future that only fails on network issues + # so create a separate future and attach a callback to update it + # based on response error codes + future = Future() + + _f = self._client.send(node_id, request) + _f.add_callback(self._handle_offset_response, future) + _f.add_errback(lambda e: future.failure(e)) + return future + + def _handle_offset_response(self, future, response): + """Callback for the response of the list offset call above. + + Arguments: + future (Future): the future to update based on response + response (OffsetResponse): response from the server + + Raises: + AssertionError: if response does not match partition + """ + timestamp_offset_map = {} + for topic, part_data in response.topics: + for partition_info in part_data: + partition, error_code = partition_info[:2] + partition = TopicPartition(topic, partition) + error_type = Errors.for_code(error_code) + if error_type is Errors.NoError: + if response.API_VERSION == 0: + offsets = partition_info[2] + assert len(offsets) <= 1, 'Expected OffsetResponse with one offset' + if not offsets: + offset = UNKNOWN_OFFSET + else: + offset = offsets[0] + log.debug("Handling v0 ListOffsetResponse response for %s. 
def _fetchable_partitions(self):
    # Partitions eligible for a new fetch: assigned, fetchable, and with
    # no buffered or pending response data still awaiting processing.
    fetchable = self._subscriptions.fetchable_partitions()
    # do not fetch a partition if we have a pending fetch response to process
    current = self._next_partition_records
    pending = copy.copy(self._completed_fetches)
    if current:
        fetchable.discard(current.topic_partition)
    for fetch in pending:
        fetchable.discard(fetch.topic_partition)
    return fetchable

def _create_fetch_requests(self):
    """Create fetch requests for all assigned partitions, grouped by node.

    FetchRequests skipped if no leader, or node has requests in flight

    Returns:
        dict: {node_id: FetchRequest, ...} (version depends on api_version)
    """
    # create the fetch info as a dict of lists of partition info tuples
    # which can be passed to FetchRequest() via .items()
    fetchable = collections.defaultdict(lambda: collections.defaultdict(list))

    for partition in self._fetchable_partitions():
        node_id = self._client.cluster.leader_for_partition(partition)

        # advance position for any deleted compacted messages if required
        if self._subscriptions.assignment[partition].last_offset_from_message_batch:
            next_offset_from_batch_header = self._subscriptions.assignment[partition].last_offset_from_message_batch + 1
            if next_offset_from_batch_header > self._subscriptions.assignment[partition].position:
                log.debug(
                    "Advance position for partition %s from %s to %s (last message batch location plus one)"
                    " to correct for deleted compacted messages",
                    partition, self._subscriptions.assignment[partition].position, next_offset_from_batch_header)
                self._subscriptions.assignment[partition].position = next_offset_from_batch_header

        position = self._subscriptions.assignment[partition].position

        # fetch if there is a leader and no in-flight requests
        if node_id is None or node_id == -1:
            log.debug("No leader found for partition %s."
                      " Requesting metadata update", partition)
            self._client.cluster.request_update()

        elif self._client.in_flight_request_count(node_id) == 0:
            partition_info = (
                partition.partition,
                position,
                self.config['max_partition_fetch_bytes']
            )
            fetchable[node_id][partition.topic].append(partition_info)
            log.debug("Adding fetch request for partition %s at offset %d",
                      partition, position)
        else:
            # level 0 (below DEBUG): extremely chatty trace message
            log.log(0, "Skipping fetch for partition %s because there is an inflight request to node %s",
                    partition, node_id)

    # Select the newest FetchRequest version the broker supports.
    if self.config['api_version'] >= (0, 11, 0):
        version = 4
    elif self.config['api_version'] >= (0, 10, 1):
        version = 3
    elif self.config['api_version'] >= (0, 10):
        version = 2
    elif self.config['api_version'] == (0, 9):
        version = 1
    else:
        version = 0
    requests = {}
    for node_id, partition_data in six.iteritems(fetchable):
        if version < 3:
            requests[node_id] = FetchRequest[version](
                -1,  # replica_id
                self.config['fetch_max_wait_ms'],
                self.config['fetch_min_bytes'],
                partition_data.items())
        else:
            # As of version == 3 partitions will be returned in order as
            # they are requested, so to avoid starvation with
            # `fetch_max_bytes` option we need this shuffle
            # NOTE: we do have partition_data in random order due to usage
            # of unordered structures like dicts, but that does not
            # guarantee equal distribution, and starting in Python3.6
            # dicts retain insert order.
            partition_data = list(partition_data.items())
            random.shuffle(partition_data)
            if version == 3:
                requests[node_id] = FetchRequest[version](
                    -1,  # replica_id
                    self.config['fetch_max_wait_ms'],
                    self.config['fetch_min_bytes'],
                    self.config['fetch_max_bytes'],
                    partition_data)
            else:
                # v4+ additionally carries the isolation level
                requests[node_id] = FetchRequest[version](
                    -1,  # replica_id
                    self.config['fetch_max_wait_ms'],
                    self.config['fetch_min_bytes'],
                    self.config['fetch_max_bytes'],
                    self._isolation_level,
                    partition_data)
    return requests
def _handle_fetch_response(self, request, send_time, response):
    """The callback for fetch completion: queue raw partition payloads as
    CompletedFetch objects and record fetch-level metrics."""
    fetch_offsets = {}
    for topic, partitions in request.topics:
        for partition_data in partitions:
            partition, offset = partition_data[:2]
            fetch_offsets[TopicPartition(topic, partition)] = offset

    partitions = set([TopicPartition(topic, partition_data[0])
                      for topic, partitions in response.topics
                      for partition_data in partitions])
    metric_aggregator = FetchResponseMetricAggregator(self._sensors, partitions)

    # randomized ordering should improve balance for short-lived consumers
    random.shuffle(response.topics)
    for topic, partitions in response.topics:
        random.shuffle(partitions)
        for partition_data in partitions:
            tp = TopicPartition(topic, partition_data[0])
            completed_fetch = CompletedFetch(
                tp, fetch_offsets[tp],
                response.API_VERSION,
                partition_data[1:],
                metric_aggregator
            )
            self._completed_fetches.append(completed_fetch)

    if response.API_VERSION >= 1:
        self._sensors.fetch_throttle_time_sensor.record(response.throttle_time_ms)
    self._sensors.fetch_latency.record((time.time() - send_time) * 1000)

def _parse_fetched_data(self, completed_fetch):
    """Parse one CompletedFetch into a PartitionRecords, or None when the
    payload must be discarded (stale offset, unfetchable partition, ...).

    Raises:
        OffsetOutOfRangeError: out-of-range fetch with no reset policy
        RecordTooLargeError: first message exceeds max_partition_fetch_bytes
        TopicAuthorizationFailedError: not authorized for the topic
    """
    tp = completed_fetch.topic_partition
    fetch_offset = completed_fetch.fetched_offset
    num_bytes = 0
    records_count = 0
    parsed_records = None

    error_code, highwater = completed_fetch.partition_data[:2]
    error_type = Errors.for_code(error_code)

    try:
        if not self._subscriptions.is_fetchable(tp):
            # this can happen when a rebalance happened or a partition
            # consumption paused while fetch is still in-flight
            log.debug("Ignoring fetched records for partition %s"
                      " since it is no longer fetchable", tp)

        elif error_type is Errors.NoError:
            self._subscriptions.assignment[tp].highwater = highwater

            # we are interested in this fetch only if the beginning
            # offset (of the *request*) matches the current consumed position
            # Note that the *response* may return a messageset that starts
            # earlier (e.g., compressed messages) or later (e.g., compacted topic)
            position = self._subscriptions.assignment[tp].position
            if position is None or position != fetch_offset:
                log.debug("Discarding fetch response for partition %s"
                          " since its offset %d does not match the"
                          " expected offset %d", tp, fetch_offset,
                          position)
                return None

            records = MemoryRecords(completed_fetch.partition_data[-1])
            if records.has_next():
                log.debug("Adding fetched record for partition %s with"
                          " offset %d to buffered record list", tp,
                          position)
                unpacked = list(self._unpack_message_set(tp, records))
                parsed_records = self.PartitionRecords(fetch_offset, tp, unpacked)
                if unpacked:
                    last_offset = unpacked[-1].offset
                    self._sensors.records_fetch_lag.record(highwater - last_offset)
                num_bytes = records.valid_bytes()
                records_count = len(unpacked)
            elif records.size_in_bytes() > 0:
                # we did not read a single message from a non-empty
                # buffer because that message's size is larger than
                # fetch size, in this case record this exception
                record_too_large_partitions = {tp: fetch_offset}
                raise RecordTooLargeError(
                    "There are some messages at [Partition=Offset]: %s "
                    " whose size is larger than the fetch size %s"
                    " and hence cannot be ever returned."
                    " Increase the fetch size, or decrease the maximum message"
                    " size the broker will allow." % (
                        record_too_large_partitions,
                        self.config['max_partition_fetch_bytes']),
                    record_too_large_partitions)
            self._sensors.record_topic_fetch_metrics(tp.topic, num_bytes, records_count)

        elif error_type in (Errors.NotLeaderForPartitionError,
                            Errors.UnknownTopicOrPartitionError):
            self._client.cluster.request_update()
        elif error_type is Errors.OffsetOutOfRangeError:
            position = self._subscriptions.assignment[tp].position
            if position is None or position != fetch_offset:
                log.debug("Discarding stale fetch response for partition %s"
                          " since the fetched offset %d does not match the"
                          " current offset %d", tp, fetch_offset, position)
            elif self._subscriptions.has_default_offset_reset_policy():
                log.info("Fetch offset %s is out of range for topic-partition %s", fetch_offset, tp)
                self._subscriptions.need_offset_reset(tp)
            else:
                raise Errors.OffsetOutOfRangeError({tp: fetch_offset})

        elif error_type is Errors.TopicAuthorizationFailedError:
            log.warning("Not authorized to read from topic %s.", tp.topic)
            # BUGFIX: was set(tp.topic), which builds a set of the topic
            # string's individual characters; wrap in a list so the error
            # carries the single topic name.
            raise Errors.TopicAuthorizationFailedError(set([tp.topic]))
        elif error_type is Errors.UnknownError:
            log.warning("Unknown error fetching data for topic-partition %s", tp)
        else:
            raise error_type('Unexpected error while fetching data')

    finally:
        # always report per-partition metrics, even on error paths
        completed_fetch.metric_aggregator.record(tp, num_bytes, records_count)

    return parsed_records

class PartitionRecords(object):
    """Buffer of unpacked ConsumerRecords for one partition, consumed
    incrementally via take(). (Nested helper class of Fetcher.)"""

    def __init__(self, fetch_offset, tp, messages):
        self.fetch_offset = fetch_offset
        self.topic_partition = tp
        self.messages = messages
        # When fetching an offset that is in the middle of a
        # compressed batch, we will get all messages in the batch.
        # But we want to start 'take' at the fetch_offset
        # (or the next highest offset in case the message was compacted)
        for i, msg in enumerate(messages):
            if msg.offset < fetch_offset:
                log.debug("Skipping message offset: %s (expecting %s)",
                          msg.offset, fetch_offset)
            else:
                self.message_idx = i
                break

        else:
            # empty list, or every message is below fetch_offset
            self.message_idx = 0
            self.messages = None

    # For truthiness evaluation we need to define __len__ or __nonzero__
    def __len__(self):
        if self.messages is None or self.message_idx >= len(self.messages):
            return 0
        return len(self.messages) - self.message_idx

    def discard(self):
        # Drop all remaining buffered messages.
        self.messages = None

    def take(self, n=None):
        """Remove and return up to ``n`` messages (all remaining if None),
        advancing fetch_offset past the last returned message."""
        if not len(self):
            return []
        if n is None or n > len(self):
            n = len(self)
        next_idx = self.message_idx + n
        res = self.messages[self.message_idx:next_idx]
        self.message_idx = next_idx
        # fetch_offset should be incremented by 1 to parallel the
        # subscription position (also incremented by 1)
        self.fetch_offset = max(self.fetch_offset, res[-1].offset + 1)
        return res
class FetchResponseMetricAggregator(object):
    """
    Since we parse the message data for each partition from each fetch
    response lazily, fetch-level metrics need to be aggregated as the messages
    from each partition are parsed. This class is used to facilitate this
    incremental aggregation.
    """

    def __init__(self, sensors, partitions):
        self.sensors = sensors
        self.unrecorded_partitions = partitions
        self.total_bytes = 0
        self.total_records = 0

    def _flush(self):
        # Push the accumulated totals into the fetch-level sensors.
        self.sensors.bytes_fetched.record(self.total_bytes)
        self.sensors.records_fetched.record(self.total_records)

    def record(self, partition, num_bytes, num_records):
        """
        Fold one partition's parsed byte/record counts into the running
        totals; once every expected partition has reported, flush the
        aggregate to the sensors.
        """
        self.unrecorded_partitions.remove(partition)
        self.total_bytes += num_bytes
        self.total_records += num_records

        # still waiting on other partitions from this fetch response
        if self.unrecorded_partitions:
            return
        self._flush()
self.group_name, + 'The maximum lag in terms of number of records for any partition in self window'), Max()) + + self.fetch_throttle_time_sensor = metrics.sensor('fetch-throttle-time') + self.fetch_throttle_time_sensor.add(metrics.metric_name('fetch-throttle-time-avg', self.group_name, + 'The average throttle time in ms'), Avg()) + self.fetch_throttle_time_sensor.add(metrics.metric_name('fetch-throttle-time-max', self.group_name, + 'The maximum throttle time in ms'), Max()) + + def record_topic_fetch_metrics(self, topic, num_bytes, num_records): + # record bytes fetched + name = '.'.join(['topic', topic, 'bytes-fetched']) + bytes_fetched = self.metrics.get_sensor(name) + if not bytes_fetched: + metric_tags = {'topic': topic.replace('.', '_')} + + bytes_fetched = self.metrics.sensor(name) + bytes_fetched.add(self.metrics.metric_name('fetch-size-avg', + self.group_name, + 'The average number of bytes fetched per request for topic %s' % (topic,), + metric_tags), Avg()) + bytes_fetched.add(self.metrics.metric_name('fetch-size-max', + self.group_name, + 'The maximum number of bytes fetched per request for topic %s' % (topic,), + metric_tags), Max()) + bytes_fetched.add(self.metrics.metric_name('bytes-consumed-rate', + self.group_name, + 'The average number of bytes consumed per second for topic %s' % (topic,), + metric_tags), Rate()) + bytes_fetched.record(num_bytes) + + # record records fetched + name = '.'.join(['topic', topic, 'records-fetched']) + records_fetched = self.metrics.get_sensor(name) + if not records_fetched: + metric_tags = {'topic': topic.replace('.', '_')} + + records_fetched = self.metrics.sensor(name) + records_fetched.add(self.metrics.metric_name('records-per-request-avg', + self.group_name, + 'The average number of records in each request for topic %s' % (topic,), + metric_tags), Avg()) + records_fetched.add(self.metrics.metric_name('records-consumed-rate', + self.group_name, + 'The average number of records consumed per second for topic %s' % 
(topic,), + metric_tags), Rate()) + records_fetched.record(num_records) diff --git a/testbed/dpkp__kafka-python/kafka/consumer/group.py b/testbed/dpkp__kafka-python/kafka/consumer/group.py new file mode 100644 index 0000000000000000000000000000000000000000..4fd57ae9c1460b34e038c8ed38951c420eea5b18 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/consumer/group.py @@ -0,0 +1,1223 @@ +from __future__ import absolute_import, division + +import copy +import logging +import socket +import time + +from kafka.errors import KafkaConfigurationError, UnsupportedVersionError + +from kafka.vendor import six + +from kafka.client_async import KafkaClient, selectors +from kafka.consumer.fetcher import Fetcher +from kafka.consumer.subscription_state import SubscriptionState +from kafka.coordinator.consumer import ConsumerCoordinator +from kafka.coordinator.assignors.range import RangePartitionAssignor +from kafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor +from kafka.metrics import MetricConfig, Metrics +from kafka.protocol.offset import OffsetResetStrategy +from kafka.structs import TopicPartition +from kafka.version import __version__ + +log = logging.getLogger(__name__) + + +class KafkaConsumer(six.Iterator): + """Consume records from a Kafka cluster. + + The consumer will transparently handle the failure of servers in the Kafka + cluster, and adapt as topic-partitions are created or migrate between + brokers. It also interacts with the assigned kafka Group Coordinator node + to allow multiple consumers to load balance consumption of topics (requires + kafka >= 0.9.0.0). + + The consumer is not thread safe and should not be shared across threads. + + Arguments: + *topics (str): optional list of topics to subscribe to. If not set, + call :meth:`~kafka.KafkaConsumer.subscribe` or + :meth:`~kafka.KafkaConsumer.assign` before consuming records. 
+ + Keyword Arguments: + bootstrap_servers: 'host[:port]' string (or list of 'host[:port]' + strings) that the consumer should contact to bootstrap initial + cluster metadata. This does not have to be the full node list. + It just needs to have at least one broker that will respond to a + Metadata API Request. Default port is 9092. If no servers are + specified, will default to localhost:9092. + client_id (str): A name for this client. This string is passed in + each request to servers and can be used to identify specific + server-side log entries that correspond to this client. Also + submitted to GroupCoordinator for logging with respect to + consumer group administration. Default: 'kafka-python-{version}' + group_id (str or None): The name of the consumer group to join for dynamic + partition assignment (if enabled), and to use for fetching and + committing offsets. If None, auto-partition assignment (via + group coordinator) and offset commits are disabled. + Default: None + key_deserializer (callable): Any callable that takes a + raw message key and returns a deserialized key. + value_deserializer (callable): Any callable that takes a + raw message value and returns a deserialized value. + fetch_min_bytes (int): Minimum amount of data the server should + return for a fetch request, otherwise wait up to + fetch_max_wait_ms for more data to accumulate. Default: 1. + fetch_max_wait_ms (int): The maximum amount of time in milliseconds + the server will block before answering the fetch request if + there isn't sufficient data to immediately satisfy the + requirement given by fetch_min_bytes. Default: 500. + fetch_max_bytes (int): The maximum amount of data the server should + return for a fetch request. This is not an absolute maximum, if the + first message in the first non-empty partition of the fetch is + larger than this value, the message will still be returned to + ensure that the consumer can make progress. 
NOTE: consumer performs + fetches to multiple brokers in parallel so memory usage will depend + on the number of brokers containing partitions for the topic. + Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 MB). + max_partition_fetch_bytes (int): The maximum amount of data + per-partition the server will return. The maximum total memory + used for a request = #partitions * max_partition_fetch_bytes. + This size must be at least as large as the maximum message size + the server allows or else it is possible for the producer to + send messages larger than the consumer can fetch. If that + happens, the consumer can get stuck trying to fetch a large + message on a certain partition. Default: 1048576. + request_timeout_ms (int): Client request timeout in milliseconds. + Default: 305000. + retry_backoff_ms (int): Milliseconds to backoff when retrying on + errors. Default: 100. + reconnect_backoff_ms (int): The amount of time in milliseconds to + wait before attempting to reconnect to a given host. + Default: 50. + reconnect_backoff_max_ms (int): The maximum amount of time in + milliseconds to backoff/wait when reconnecting to a broker that has + repeatedly failed to connect. If provided, the backoff per host + will increase exponentially for each consecutive connection + failure, up to this maximum. Once the maximum is reached, + reconnection attempts will continue periodically with this fixed + rate. To avoid connection storms, a randomization factor of 0.2 + will be applied to the backoff resulting in a random range between + 20% below and 20% above the computed value. Default: 1000. + max_in_flight_requests_per_connection (int): Requests are pipelined + to kafka brokers up to this number of maximum requests per + broker connection. Default: 5. + auto_offset_reset (str): A policy for resetting offsets on + OffsetOutOfRange errors: 'earliest' will move to the oldest + available message, 'latest' will move to the most recent. 
Any + other value will raise the exception. Default: 'latest'. + enable_auto_commit (bool): If True , the consumer's offset will be + periodically committed in the background. Default: True. + auto_commit_interval_ms (int): Number of milliseconds between automatic + offset commits, if enable_auto_commit is True. Default: 5000. + default_offset_commit_callback (callable): Called as + callback(offsets, response) response will be either an Exception + or an OffsetCommitResponse struct. This callback can be used to + trigger custom actions when a commit request completes. + check_crcs (bool): Automatically check the CRC32 of the records + consumed. This ensures no on-the-wire or on-disk corruption to + the messages occurred. This check adds some overhead, so it may + be disabled in cases seeking extreme performance. Default: True + metadata_max_age_ms (int): The period of time in milliseconds after + which we force a refresh of metadata, even if we haven't seen any + partition leadership changes to proactively discover any new + brokers or partitions. Default: 300000 + partition_assignment_strategy (list): List of objects to use to + distribute partition ownership amongst consumer instances when + group management is used. + Default: [RangePartitionAssignor, RoundRobinPartitionAssignor] + max_poll_records (int): The maximum number of records returned in a + single call to :meth:`~kafka.KafkaConsumer.poll`. Default: 500 + max_poll_interval_ms (int): The maximum delay between invocations of + :meth:`~kafka.KafkaConsumer.poll` when using consumer group + management. This places an upper bound on the amount of time that + the consumer can be idle before fetching more records. If + :meth:`~kafka.KafkaConsumer.poll` is not called before expiration + of this timeout, then the consumer is considered failed and the + group will rebalance in order to reassign the partitions to another + member. 
Default 300000 + session_timeout_ms (int): The timeout used to detect failures when + using Kafka's group management facilities. The consumer sends + periodic heartbeats to indicate its liveness to the broker. If + no heartbeats are received by the broker before the expiration of + this session timeout, then the broker will remove this consumer + from the group and initiate a rebalance. Note that the value must + be in the allowable range as configured in the broker configuration + by group.min.session.timeout.ms and group.max.session.timeout.ms. + Default: 10000 + heartbeat_interval_ms (int): The expected time in milliseconds + between heartbeats to the consumer coordinator when using + Kafka's group management facilities. Heartbeats are used to ensure + that the consumer's session stays active and to facilitate + rebalancing when new consumers join or leave the group. The + value must be set lower than session_timeout_ms, but typically + should be set no higher than 1/3 of that value. It can be + adjusted even lower to control the expected time for normal + rebalances. Default: 3000 + receive_buffer_bytes (int): The size of the TCP receive buffer + (SO_RCVBUF) to use when reading data. Default: None (relies on + system defaults). The java client defaults to 32768. + send_buffer_bytes (int): The size of the TCP send buffer + (SO_SNDBUF) to use when sending data. Default: None (relies on + system defaults). The java client defaults to 131072. + socket_options (list): List of tuple-arguments to socket.setsockopt + to apply to broker connection sockets. Default: + [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + consumer_timeout_ms (int): number of milliseconds to block during + message iteration before raising StopIteration (i.e., ending the + iterator). Default block forever [float('inf')]. + security_protocol (str): Protocol used to communicate with brokers. + Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL. + Default: PLAINTEXT. 
+ ssl_context (ssl.SSLContext): Pre-configured SSLContext for wrapping + socket connections. If provided, all other ssl_* configurations + will be ignored. Default: None. + ssl_check_hostname (bool): Flag to configure whether ssl handshake + should verify that the certificate matches the brokers hostname. + Default: True. + ssl_cafile (str): Optional filename of ca file to use in certificate + verification. Default: None. + ssl_certfile (str): Optional filename of file in pem format containing + the client certificate, as well as any ca certificates needed to + establish the certificate's authenticity. Default: None. + ssl_keyfile (str): Optional filename containing the client private key. + Default: None. + ssl_password (str): Optional password to be used when loading the + certificate chain. Default: None. + ssl_crlfile (str): Optional filename containing the CRL to check for + certificate expiration. By default, no CRL check is done. When + providing a file, only the leaf certificate will be checked against + this CRL. The CRL can only be checked with Python 3.4+ or 2.7.9+. + Default: None. + ssl_ciphers (str): optionally set the available ciphers for ssl + connections. It should be a string in the OpenSSL cipher list + format. If no cipher can be selected (because compile-time options + or other configuration forbids use of all the specified ciphers), + an ssl.SSLError will be raised. See ssl.SSLContext.set_ciphers + api_version (tuple): Specify which Kafka API version to use. If set to + None, the client will attempt to infer the broker version by probing + various APIs. Different versions enable different functionality. 
+ + Examples: + (0, 9) enables full group coordination features with automatic + partition assignment and rebalancing, + (0, 8, 2) enables kafka-storage offset commits with manual + partition assignment only, + (0, 8, 1) enables zookeeper-storage offset commits with manual + partition assignment only, + (0, 8, 0) enables basic functionality but requires manual + partition assignment and offset management. + + Default: None + api_version_auto_timeout_ms (int): number of milliseconds to throw a + timeout exception from the constructor when checking the broker + api version. Only applies if api_version set to None. + connections_max_idle_ms: Close idle connections after the number of + milliseconds specified by this config. The broker closes idle + connections after connections.max.idle.ms, so this avoids hitting + unexpected socket disconnected errors on the client. + Default: 540000 + metric_reporters (list): A list of classes to use as metrics reporters. + Implementing the AbstractMetricsReporter interface allows plugging + in classes that will be notified of new metric creation. Default: [] + metrics_num_samples (int): The number of samples maintained to compute + metrics. Default: 2 + metrics_sample_window_ms (int): The maximum age in milliseconds of + samples used to compute metrics. Default: 30000 + selector (selectors.BaseSelector): Provide a specific selector + implementation to use for I/O multiplexing. + Default: selectors.DefaultSelector + exclude_internal_topics (bool): Whether records from internal topics + (such as offsets) should be exposed to the consumer. If set to True + the only way to receive records from an internal topic is + subscribing to it. Requires 0.10+ Default: True + sasl_mechanism (str): Authentication mechanism when security_protocol + is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: + PLAIN, GSSAPI, OAUTHBEARER, SCRAM-SHA-256, SCRAM-SHA-512. + sasl_plain_username (str): username for sasl PLAIN and SCRAM authentication. 
+ Required if sasl_mechanism is PLAIN or one of the SCRAM mechanisms. + sasl_plain_password (str): password for sasl PLAIN and SCRAM authentication. + Required if sasl_mechanism is PLAIN or one of the SCRAM mechanisms. + sasl_kerberos_service_name (str): Service name to include in GSSAPI + sasl mechanism handshake. Default: 'kafka' + sasl_kerberos_domain_name (str): kerberos domain name to use in GSSAPI + sasl mechanism handshake. Default: one of bootstrap servers + sasl_oauth_token_provider (AbstractTokenProvider): OAuthBearer token provider + instance. (See kafka.oauth.abstract). Default: None + + Note: + Configuration parameters are described in more detail at + https://kafka.apache.org/documentation/#consumerconfigs + """ + DEFAULT_CONFIG = { + 'bootstrap_servers': 'localhost', + 'client_id': 'kafka-python-' + __version__, + 'group_id': None, + 'key_deserializer': None, + 'value_deserializer': None, + 'fetch_max_wait_ms': 500, + 'fetch_min_bytes': 1, + 'fetch_max_bytes': 52428800, + 'max_partition_fetch_bytes': 1 * 1024 * 1024, + 'request_timeout_ms': 305000, # chosen to be higher than the default of max_poll_interval_ms + 'retry_backoff_ms': 100, + 'reconnect_backoff_ms': 50, + 'reconnect_backoff_max_ms': 1000, + 'max_in_flight_requests_per_connection': 5, + 'auto_offset_reset': 'latest', + 'enable_auto_commit': True, + 'auto_commit_interval_ms': 5000, + 'default_offset_commit_callback': lambda offsets, response: True, + 'check_crcs': True, + 'metadata_max_age_ms': 5 * 60 * 1000, + 'partition_assignment_strategy': (RangePartitionAssignor, RoundRobinPartitionAssignor), + 'max_poll_records': 500, + 'max_poll_interval_ms': 300000, + 'session_timeout_ms': 10000, + 'heartbeat_interval_ms': 3000, + 'receive_buffer_bytes': None, + 'send_buffer_bytes': None, + 'socket_options': [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)], + 'sock_chunk_bytes': 4096, # undocumented experimental option + 'sock_chunk_buffer_count': 1000, # undocumented experimental option + 
'consumer_timeout_ms': float('inf'), + 'security_protocol': 'PLAINTEXT', + 'ssl_context': None, + 'ssl_check_hostname': True, + 'ssl_cafile': None, + 'ssl_certfile': None, + 'ssl_keyfile': None, + 'ssl_crlfile': None, + 'ssl_password': None, + 'ssl_ciphers': None, + 'api_version': None, + 'api_version_auto_timeout_ms': 2000, + 'connections_max_idle_ms': 9 * 60 * 1000, + 'metric_reporters': [], + 'metrics_num_samples': 2, + 'metrics_sample_window_ms': 30000, + 'metric_group_prefix': 'consumer', + 'selector': selectors.DefaultSelector, + 'exclude_internal_topics': True, + 'sasl_mechanism': None, + 'sasl_plain_username': None, + 'sasl_plain_password': None, + 'sasl_kerberos_service_name': 'kafka', + 'sasl_kerberos_domain_name': None, + 'sasl_oauth_token_provider': None, + 'legacy_iterator': False, # enable to revert to < 1.4.7 iterator + } + DEFAULT_SESSION_TIMEOUT_MS_0_9 = 30000 + + def __init__(self, *topics, **configs): + # Only check for extra config keys in top-level class + extra_configs = set(configs).difference(self.DEFAULT_CONFIG) + if extra_configs: + raise KafkaConfigurationError("Unrecognized configs: %s" % (extra_configs,)) + + self.config = copy.copy(self.DEFAULT_CONFIG) + self.config.update(configs) + + deprecated = {'smallest': 'earliest', 'largest': 'latest'} + if self.config['auto_offset_reset'] in deprecated: + new_config = deprecated[self.config['auto_offset_reset']] + log.warning('use auto_offset_reset=%s (%s is deprecated)', + new_config, self.config['auto_offset_reset']) + self.config['auto_offset_reset'] = new_config + + connections_max_idle_ms = self.config['connections_max_idle_ms'] + request_timeout_ms = self.config['request_timeout_ms'] + fetch_max_wait_ms = self.config['fetch_max_wait_ms'] + if not (fetch_max_wait_ms < request_timeout_ms < connections_max_idle_ms): + raise KafkaConfigurationError( + "connections_max_idle_ms ({}) must be larger than " + "request_timeout_ms ({}) which must be larger than " + "fetch_max_wait_ms ({})." 
+ .format(connections_max_idle_ms, request_timeout_ms, fetch_max_wait_ms)) + + metrics_tags = {'client-id': self.config['client_id']} + metric_config = MetricConfig(samples=self.config['metrics_num_samples'], + time_window_ms=self.config['metrics_sample_window_ms'], + tags=metrics_tags) + reporters = [reporter() for reporter in self.config['metric_reporters']] + self._metrics = Metrics(metric_config, reporters) + # TODO _metrics likely needs to be passed to KafkaClient, etc. + + # api_version was previously a str. Accept old format for now + if isinstance(self.config['api_version'], str): + str_version = self.config['api_version'] + if str_version == 'auto': + self.config['api_version'] = None + else: + self.config['api_version'] = tuple(map(int, str_version.split('.'))) + log.warning('use api_version=%s [tuple] -- "%s" as str is deprecated', + str(self.config['api_version']), str_version) + + self._client = KafkaClient(metrics=self._metrics, **self.config) + + # Get auto-discovered version from client if necessary + if self.config['api_version'] is None: + self.config['api_version'] = self._client.config['api_version'] + + # Coordinator configurations are different for older brokers + # max_poll_interval_ms is not supported directly -- it must the be + # the same as session_timeout_ms. If the user provides one of them, + # use it for both. 
Otherwise use the old default of 30secs + if self.config['api_version'] < (0, 10, 1): + if 'session_timeout_ms' not in configs: + if 'max_poll_interval_ms' in configs: + self.config['session_timeout_ms'] = configs['max_poll_interval_ms'] + else: + self.config['session_timeout_ms'] = self.DEFAULT_SESSION_TIMEOUT_MS_0_9 + if 'max_poll_interval_ms' not in configs: + self.config['max_poll_interval_ms'] = self.config['session_timeout_ms'] + + if self.config['group_id'] is not None: + if self.config['request_timeout_ms'] <= self.config['session_timeout_ms']: + raise KafkaConfigurationError( + "Request timeout (%s) must be larger than session timeout (%s)" % + (self.config['request_timeout_ms'], self.config['session_timeout_ms'])) + + self._subscription = SubscriptionState(self.config['auto_offset_reset']) + self._fetcher = Fetcher( + self._client, self._subscription, self._metrics, **self.config) + self._coordinator = ConsumerCoordinator( + self._client, self._subscription, self._metrics, + assignors=self.config['partition_assignment_strategy'], + **self.config) + self._closed = False + self._iterator = None + self._consumer_timeout = float('inf') + + if topics: + self._subscription.subscribe(topics=topics) + self._client.set_topics(topics) + + def bootstrap_connected(self): + """Return True if the bootstrap is connected.""" + return self._client.bootstrap_connected() + + def assign(self, partitions): + """Manually assign a list of TopicPartitions to this consumer. + + Arguments: + partitions (list of TopicPartition): Assignment for this instance. + + Raises: + IllegalStateError: If consumer has already called + :meth:`~kafka.KafkaConsumer.subscribe`. + + Warning: + It is not possible to use both manual partition assignment with + :meth:`~kafka.KafkaConsumer.assign` and group assignment with + :meth:`~kafka.KafkaConsumer.subscribe`. + + Note: + This interface does not support incremental assignment and will + replace the previous assignment (if there was one). 
+ + Note: + Manual topic assignment through this method does not use the + consumer's group management functionality. As such, there will be + no rebalance operation triggered when group membership or cluster + and topic metadata change. + """ + self._subscription.assign_from_user(partitions) + self._client.set_topics([tp.topic for tp in partitions]) + + def assignment(self): + """Get the TopicPartitions currently assigned to this consumer. + + If partitions were directly assigned using + :meth:`~kafka.KafkaConsumer.assign`, then this will simply return the + same partitions that were previously assigned. If topics were + subscribed using :meth:`~kafka.KafkaConsumer.subscribe`, then this will + give the set of topic partitions currently assigned to the consumer + (which may be None if the assignment hasn't happened yet, or if the + partitions are in the process of being reassigned). + + Returns: + set: {TopicPartition, ...} + """ + return self._subscription.assigned_partitions() + + def close(self, autocommit=True): + """Close the consumer, waiting indefinitely for any needed cleanup. + + Keyword Arguments: + autocommit (bool): If auto-commit is configured for this consumer, + this optional flag causes the consumer to attempt to commit any + pending consumed offsets prior to close. Default: True + """ + if self._closed: + return + log.debug("Closing the KafkaConsumer.") + self._closed = True + self._coordinator.close(autocommit=autocommit) + self._metrics.close() + self._client.close() + try: + self.config['key_deserializer'].close() + except AttributeError: + pass + try: + self.config['value_deserializer'].close() + except AttributeError: + pass + log.debug("The KafkaConsumer has closed.") + + def commit_async(self, offsets=None, callback=None): + """Commit offsets to kafka asynchronously, optionally firing callback. + + This commits offsets only to Kafka. 
The offsets committed using this API + will be used on the first fetch after every rebalance and also on + startup. As such, if you need to store offsets in anything other than + Kafka, this API should not be used. To avoid re-processing the last + message read if a consumer is restarted, the committed offset should be + the next message your application should consume, i.e.: last_offset + 1. + + This is an asynchronous call and will not block. Any errors encountered + are either passed to the callback (if provided) or discarded. + + Arguments: + offsets (dict, optional): {TopicPartition: OffsetAndMetadata} dict + to commit with the configured group_id. Defaults to currently + consumed offsets for all subscribed partitions. + callback (callable, optional): Called as callback(offsets, response) + with response as either an Exception or an OffsetCommitResponse + struct. This callback can be used to trigger custom actions when + a commit request completes. + + Returns: + kafka.future.Future + """ + assert self.config['api_version'] >= (0, 8, 1), 'Requires >= Kafka 0.8.1' + assert self.config['group_id'] is not None, 'Requires group_id' + if offsets is None: + offsets = self._subscription.all_consumed_offsets() + log.debug("Committing offsets: %s", offsets) + future = self._coordinator.commit_offsets_async( + offsets, callback=callback) + return future + + def commit(self, offsets=None): + """Commit offsets to kafka, blocking until success or error. + + This commits offsets only to Kafka. The offsets committed using this API + will be used on the first fetch after every rebalance and also on + startup. As such, if you need to store offsets in anything other than + Kafka, this API should not be used. To avoid re-processing the last + message read if a consumer is restarted, the committed offset should be + the next message your application should consume, i.e.: last_offset + 1. 
+ + Blocks until either the commit succeeds or an unrecoverable error is + encountered (in which case it is thrown to the caller). + + Currently only supports kafka-topic offset storage (not zookeeper). + + Arguments: + offsets (dict, optional): {TopicPartition: OffsetAndMetadata} dict + to commit with the configured group_id. Defaults to currently + consumed offsets for all subscribed partitions. + """ + assert self.config['api_version'] >= (0, 8, 1), 'Requires >= Kafka 0.8.1' + assert self.config['group_id'] is not None, 'Requires group_id' + if offsets is None: + offsets = self._subscription.all_consumed_offsets() + self._coordinator.commit_offsets_sync(offsets) + + def committed(self, partition, metadata=False): + """Get the last committed offset for the given partition. + + This offset will be used as the position for the consumer + in the event of a failure. + + This call may block to do a remote call if the partition in question + isn't assigned to this consumer or if the consumer hasn't yet + initialized its cache of committed offsets. + + Arguments: + partition (TopicPartition): The partition to check. + metadata (bool, optional): If True, return OffsetAndMetadata struct + instead of offset int. Default: False. + + Returns: + The last committed offset (int or OffsetAndMetadata), or None if there was no prior commit. 
+ """ + assert self.config['api_version'] >= (0, 8, 1), 'Requires >= Kafka 0.8.1' + assert self.config['group_id'] is not None, 'Requires group_id' + if not isinstance(partition, TopicPartition): + raise TypeError('partition must be a TopicPartition namedtuple') + if self._subscription.is_assigned(partition): + committed = self._subscription.assignment[partition].committed + if committed is None: + self._coordinator.refresh_committed_offsets_if_needed() + committed = self._subscription.assignment[partition].committed + else: + commit_map = self._coordinator.fetch_committed_offsets([partition]) + if partition in commit_map: + committed = commit_map[partition] + else: + committed = None + + if committed is not None: + if metadata: + return committed + else: + return committed.offset + + def _fetch_all_topic_metadata(self): + """A blocking call that fetches topic metadata for all topics in the + cluster that the user is authorized to view. + """ + cluster = self._client.cluster + if self._client._metadata_refresh_in_progress and self._client._topics: + future = cluster.request_update() + self._client.poll(future=future) + stash = cluster.need_all_topic_metadata + cluster.need_all_topic_metadata = True + future = cluster.request_update() + self._client.poll(future=future) + cluster.need_all_topic_metadata = stash + + def topics(self): + """Get all topics the user is authorized to view. + This will always issue a remote call to the cluster to fetch the latest + information. + + Returns: + set: topics + """ + self._fetch_all_topic_metadata() + return self._client.cluster.topics() + + def partitions_for_topic(self, topic): + """This method first checks the local metadata cache for information + about the topic. If the topic is not found (either because the topic + does not exist, the user is not authorized to view the topic, or the + metadata cache is not populated), then it will issue a metadata update + call to the cluster. + + Arguments: + topic (str): Topic to check. 
+ + Returns: + set: Partition ids + """ + cluster = self._client.cluster + partitions = cluster.partitions_for_topic(topic) + if partitions is None: + self._fetch_all_topic_metadata() + partitions = cluster.partitions_for_topic(topic) + return partitions + + def poll(self, timeout_ms=0, max_records=None, update_offsets=True): + """Fetch data from assigned topics / partitions. + + Records are fetched and returned in batches by topic-partition. + On each poll, consumer will try to use the last consumed offset as the + starting offset and fetch sequentially. The last consumed offset can be + manually set through :meth:`~kafka.KafkaConsumer.seek` or automatically + set as the last committed offset for the subscribed list of partitions. + + Incompatible with iterator interface -- use one or the other, not both. + + Arguments: + timeout_ms (int, optional): Milliseconds spent waiting in poll if + data is not available in the buffer. If 0, returns immediately + with any records that are available currently in the buffer, + else returns empty. Must not be negative. Default: 0 + max_records (int, optional): The maximum number of records returned + in a single call to :meth:`~kafka.KafkaConsumer.poll`. + Default: Inherit value from max_poll_records. + + Returns: + dict: Topic to list of records since the last fetch for the + subscribed list of topics and partitions. + """ + # Note: update_offsets is an internal-use only argument. It is used to + # support the python iterator interface, and which wraps consumer.poll() + # and requires that the partition offsets tracked by the fetcher are not + # updated until the iterator returns each record to the user. As such, + # the argument is not documented and should not be relied on by library + # users to not break in the future. 
        assert timeout_ms >= 0, 'Timeout must not be negative'
        if max_records is None:
            max_records = self.config['max_poll_records']
        assert isinstance(max_records, int), 'max_records must be an integer'
        assert max_records > 0, 'max_records must be positive'
        assert not self._closed, 'KafkaConsumer is closed'

        # Poll for new data until the timeout expires
        start = time.time()
        remaining = timeout_ms
        while not self._closed:
            records = self._poll_once(remaining, max_records, update_offsets=update_offsets)
            if records:
                return records

            # Shrink the remaining budget by the wall-clock time spent so far;
            # exit once the budget is exhausted.
            elapsed_ms = (time.time() - start) * 1000
            remaining = timeout_ms - elapsed_ms

            if remaining <= 0:
                break

        return {}

    def _poll_once(self, timeout_ms, max_records, update_offsets=True):
        """Do one round of polling. In addition to checking for new data, this does
        any needed heart-beating, auto-commits, and offset updates.

        Arguments:
            timeout_ms (int): The maximum time in milliseconds to block.

        Returns:
            dict: Map of topic to list of records (may be empty).
        """
        self._coordinator.poll()

        # Fetch positions if we have partitions we're subscribed to that we
        # don't know the offset for
        if not self._subscription.has_all_fetch_positions():
            self._update_fetch_positions(self._subscription.missing_fetch_positions())

        # If data is available already, e.g. from a previous network client
        # poll() call to commit, then just return it immediately
        records, partial = self._fetcher.fetched_records(max_records, update_offsets=update_offsets)
        if records:
            # Before returning the fetched records, we can send off the
            # next round of fetches and avoid block waiting for their
            # responses to enable pipelining while the user is handling the
            # fetched records.
            if not partial:
                futures = self._fetcher.send_fetches()
                if len(futures):
                    self._client.poll(timeout_ms=0)
            return records

        # Send any new fetches (won't resend pending fetches)
        futures = self._fetcher.send_fetches()
        if len(futures):
            self._client.poll(timeout_ms=0)

        # Cap the blocking poll so scheduled coordinator work (heartbeats,
        # auto-commits) is never starved by a long user timeout.
        timeout_ms = min(timeout_ms, self._coordinator.time_to_next_poll() * 1000)
        self._client.poll(timeout_ms=timeout_ms)
        # after the long poll, we should check whether the group needs to rebalance
        # prior to returning data so that the group can stabilize faster
        if self._coordinator.need_rejoin():
            return {}

        records, _ = self._fetcher.fetched_records(max_records, update_offsets=update_offsets)
        return records

    def position(self, partition):
        """Get the offset of the next record that will be fetched

        Arguments:
            partition (TopicPartition): Partition to check

        Returns:
            int: Offset
        """
        if not isinstance(partition, TopicPartition):
            raise TypeError('partition must be a TopicPartition namedtuple')
        assert self._subscription.is_assigned(partition), 'Partition is not assigned'
        offset = self._subscription.assignment[partition].position
        if offset is None:
            # No position yet -- resolve one (committed offset or reset
            # policy) before re-reading.
            self._update_fetch_positions([partition])
            offset = self._subscription.assignment[partition].position
        return offset

    def highwater(self, partition):
        """Last known highwater offset for a partition.

        A highwater offset is the offset that will be assigned to the next
        message that is produced. It may be useful for calculating lag, by
        comparing with the reported position. Note that both position and
        highwater refer to the *next* offset -- i.e., highwater offset is
        one greater than the newest available message.

        Highwater offsets are returned in FetchResponse messages, so will
        not be available if no FetchRequests have been sent for this partition
        yet.

        Arguments:
            partition (TopicPartition): Partition to check

        Returns:
            int or None: Offset if available
        """
        if not isinstance(partition, TopicPartition):
            raise TypeError('partition must be a TopicPartition namedtuple')
        assert self._subscription.is_assigned(partition), 'Partition is not assigned'
        return self._subscription.assignment[partition].highwater

    def pause(self, *partitions):
        """Suspend fetching from the requested partitions.

        Future calls to :meth:`~kafka.KafkaConsumer.poll` will not return any
        records from these partitions until they have been resumed using
        :meth:`~kafka.KafkaConsumer.resume`.

        Note: This method does not affect partition subscription. In particular,
        it does not cause a group rebalance when automatic assignment is used.

        Arguments:
            *partitions (TopicPartition): Partitions to pause.
        """
        if not all([isinstance(p, TopicPartition) for p in partitions]):
            raise TypeError('partitions must be TopicPartition namedtuples')
        for partition in partitions:
            log.debug("Pausing partition %s", partition)
            self._subscription.pause(partition)
        # Because the iterator checks is_fetchable() on each iteration
        # we expect pauses to get handled automatically and therefore
        # we do not need to reset the full iterator (forcing a full refetch)

    def paused(self):
        """Get the partitions that were previously paused using
        :meth:`~kafka.KafkaConsumer.pause`.

        Returns:
            set: {partition (TopicPartition), ...}
        """
        return self._subscription.paused_partitions()

    def resume(self, *partitions):
        """Resume fetching from the specified (paused) partitions.

        Arguments:
            *partitions (TopicPartition): Partitions to resume.
        """
        if not all([isinstance(p, TopicPartition) for p in partitions]):
            raise TypeError('partitions must be TopicPartition namedtuples')
        for partition in partitions:
            log.debug("Resuming partition %s", partition)
            self._subscription.resume(partition)

    def seek(self, partition, offset):
        """Manually specify the fetch offset for a TopicPartition.

        Overrides the fetch offsets that the consumer will use on the next
        :meth:`~kafka.KafkaConsumer.poll`. If this API is invoked for the same
        partition more than once, the latest offset will be used on the next
        :meth:`~kafka.KafkaConsumer.poll`.

        Note: You may lose data if this API is arbitrarily used in the middle of
        consumption to reset the fetch offsets.

        Arguments:
            partition (TopicPartition): Partition for seek operation
            offset (int): Message offset in partition

        Raises:
            AssertionError: If offset is not an int >= 0; or if partition is not
                currently assigned.
        """
        if not isinstance(partition, TopicPartition):
            raise TypeError('partition must be a TopicPartition namedtuple')
        assert isinstance(offset, int) and offset >= 0, 'Offset must be >= 0'
        assert partition in self._subscription.assigned_partitions(), 'Unassigned partition'
        log.debug("Seeking to offset %s for partition %s", offset, partition)
        self._subscription.assignment[partition].seek(offset)
        if not self.config['legacy_iterator']:
            # Invalidate the v2 iterator so the new position takes effect on
            # the next iteration.
            self._iterator = None

    def seek_to_beginning(self, *partitions):
        """Seek to the oldest available offset for partitions.

        Arguments:
            *partitions: Optionally provide specific TopicPartitions, otherwise
                default to all assigned partitions.

        Raises:
            AssertionError: If any partition is not currently assigned, or if
                no partitions are assigned.
        """
        if not all([isinstance(p, TopicPartition) for p in partitions]):
            raise TypeError('partitions must be TopicPartition namedtuples')
        if not partitions:
            partitions = self._subscription.assigned_partitions()
            assert partitions, 'No partitions are currently assigned'
        else:
            for p in partitions:
                assert p in self._subscription.assigned_partitions(), 'Unassigned partition'

        for tp in partitions:
            log.debug("Seeking to beginning of partition %s", tp)
            self._subscription.need_offset_reset(tp, OffsetResetStrategy.EARLIEST)
        if not self.config['legacy_iterator']:
            self._iterator = None

    def seek_to_end(self, *partitions):
        """Seek to the most recent available offset for partitions.

        Arguments:
            *partitions: Optionally provide specific TopicPartitions, otherwise
                default to all assigned partitions.

        Raises:
            AssertionError: If any partition is not currently assigned, or if
                no partitions are assigned.
        """
        if not all([isinstance(p, TopicPartition) for p in partitions]):
            raise TypeError('partitions must be TopicPartition namedtuples')
        if not partitions:
            partitions = self._subscription.assigned_partitions()
            assert partitions, 'No partitions are currently assigned'
        else:
            for p in partitions:
                assert p in self._subscription.assigned_partitions(), 'Unassigned partition'

        for tp in partitions:
            log.debug("Seeking to end of partition %s", tp)
            self._subscription.need_offset_reset(tp, OffsetResetStrategy.LATEST)
        if not self.config['legacy_iterator']:
            self._iterator = None

    def subscribe(self, topics=(), pattern=None, listener=None):
        """Subscribe to a list of topics, or a topic regex pattern.

        Partitions will be dynamically assigned via a group coordinator.
        Topic subscriptions are not incremental: this list will replace the
        current assignment (if there is one).

        This method is incompatible with :meth:`~kafka.KafkaConsumer.assign`.

        Arguments:
            topics (list): List of topics for subscription.
            pattern (str): Pattern to match available topics. You must provide
                either topics or pattern, but not both.
            listener (ConsumerRebalanceListener): Optionally include listener
                callback, which will be called before and after each rebalance
                operation.

                As part of group management, the consumer will keep track of the
                list of consumers that belong to a particular group and will
                trigger a rebalance operation if one of the following events
                trigger:

                * Number of partitions change for any of the subscribed topics
                * Topic is created or deleted
                * An existing member of the consumer group dies
                * A new member is added to the consumer group

                When any of these events are triggered, the provided listener
                will be invoked first to indicate that the consumer's assignment
                has been revoked, and then again when the new assignment has
                been received. Note that this listener will immediately override
                any listener set in a previous call to subscribe. It is
                guaranteed, however, that the partitions revoked/assigned
                through this interface are from topics subscribed in this call.

        Raises:
            IllegalStateError: If called after previously calling
                :meth:`~kafka.KafkaConsumer.assign`.
            AssertionError: If neither topics or pattern is provided.
            TypeError: If listener is not a ConsumerRebalanceListener.
        """
        # SubscriptionState handles error checking
        self._subscription.subscribe(topics=topics,
                                     pattern=pattern,
                                     listener=listener)

        # Regex will need all topic metadata
        if pattern is not None:
            self._client.cluster.need_all_topic_metadata = True
            self._client.set_topics([])
            self._client.cluster.request_update()
            log.debug("Subscribed to topic pattern: %s", pattern)
        else:
            self._client.cluster.need_all_topic_metadata = False
            self._client.set_topics(self._subscription.group_subscription())
            log.debug("Subscribed to topic(s): %s", topics)

    def subscription(self):
        """Get the current topic subscription.

        Returns:
            set: {topic, ...}
        """
        if self._subscription.subscription is None:
            return None
        # Return a copy so callers cannot mutate internal subscription state.
        return self._subscription.subscription.copy()

    def unsubscribe(self):
        """Unsubscribe from all topics and clear all assigned partitions."""
        self._subscription.unsubscribe()
        self._coordinator.close()
        self._client.cluster.need_all_topic_metadata = False
        self._client.set_topics([])
        log.debug("Unsubscribed all topics or patterns and assigned partitions")
        if not self.config['legacy_iterator']:
            self._iterator = None

    def metrics(self, raw=False):
        """Get metrics on consumer performance.

        This is ported from the Java Consumer, for details see:
        https://kafka.apache.org/documentation/#consumer_monitoring

        Warning:
            This is an unstable interface. It may change in future
            releases without warning.
        """
        if raw:
            return self._metrics.metrics.copy()

        metrics = {}
        for k, v in six.iteritems(self._metrics.metrics.copy()):
            if k.group not in metrics:
                metrics[k.group] = {}
            if k.name not in metrics[k.group]:
                metrics[k.group][k.name] = {}
            # NOTE(review): the membership check above is redundant -- the
            # value is unconditionally overwritten on the next line.
            metrics[k.group][k.name] = v.value()
        return metrics

    def offsets_for_times(self, timestamps):
        """Look up the offsets for the given partitions by timestamp. The
        returned offset for each partition is the earliest offset whose
        timestamp is greater than or equal to the given timestamp in the
        corresponding partition.

        This is a blocking call. The consumer does not have to be assigned the
        partitions.

        If the message format version in a partition is before 0.10.0, i.e.
        the messages do not have timestamps, ``None`` will be returned for that
        partition. ``None`` will also be returned for the partition if there
        are no messages in it.

        Note:
            This method may block indefinitely if the partition does not exist.

        Arguments:
            timestamps (dict): ``{TopicPartition: int}`` mapping from partition
                to the timestamp to look up.
                Unit should be milliseconds since
                beginning of the epoch (midnight Jan 1, 1970 (UTC))

        Returns:
            ``{TopicPartition: OffsetAndTimestamp}``: mapping from partition
                to the timestamp and offset of the first message with timestamp
                greater than or equal to the target timestamp.

        Raises:
            ValueError: If the target timestamp is negative
            UnsupportedVersionError: If the broker does not support looking
                up the offsets by timestamp.
            KafkaTimeoutError: If fetch failed in request_timeout_ms
        """
        if self.config['api_version'] <= (0, 10, 0):
            raise UnsupportedVersionError(
                "offsets_for_times API not supported for cluster version {}"
                .format(self.config['api_version']))
        for tp, ts in six.iteritems(timestamps):
            # NOTE(review): coerces values in the *caller's* dict in place;
            # safe during iteration because no keys are added or removed.
            timestamps[tp] = int(ts)
            if ts < 0:
                raise ValueError(
                    "The target time for partition {} is {}. The target time "
                    "cannot be negative.".format(tp, ts))
        return self._fetcher.get_offsets_by_times(
            timestamps, self.config['request_timeout_ms'])

    def beginning_offsets(self, partitions):
        """Get the first offset for the given partitions.

        This method does not change the current consumer position of the
        partitions.

        Note:
            This method may block indefinitely if the partition does not exist.

        Arguments:
            partitions (list): List of TopicPartition instances to fetch
                offsets for.

        Returns:
            ``{TopicPartition: int}``: The earliest available offsets for the
            given partitions.

        Raises:
            UnsupportedVersionError: If the broker does not support looking
                up the offsets by timestamp.
            KafkaTimeoutError: If fetch failed in request_timeout_ms.
        """
        offsets = self._fetcher.beginning_offsets(
            partitions, self.config['request_timeout_ms'])
        return offsets

    def end_offsets(self, partitions):
        """Get the last offset for the given partitions. The last offset of a
        partition is the offset of the upcoming message, i.e. the offset of the
        last available message + 1.

        This method does not change the current consumer position of the
        partitions.

        Note:
            This method may block indefinitely if the partition does not exist.

        Arguments:
            partitions (list): List of TopicPartition instances to fetch
                offsets for.

        Returns:
            ``{TopicPartition: int}``: The end offsets for the given partitions.

        Raises:
            UnsupportedVersionError: If the broker does not support looking
                up the offsets by timestamp.
            KafkaTimeoutError: If fetch failed in request_timeout_ms
        """
        offsets = self._fetcher.end_offsets(
            partitions, self.config['request_timeout_ms'])
        return offsets

    def _use_consumer_group(self):
        """Return True iff this consumer can/should join a broker-coordinated group."""
        if self.config['api_version'] < (0, 9):
            return False
        elif self.config['group_id'] is None:
            return False
        elif not self._subscription.partitions_auto_assigned():
            return False
        return True

    def _update_fetch_positions(self, partitions):
        """Set the fetch position to the committed position (if there is one)
        or reset it using the offset reset policy the user has configured.

        Arguments:
            partitions (List[TopicPartition]): The partitions that need
                updating fetch positions.

        Raises:
            NoOffsetForPartitionError: If no offset is stored for a given
                partition and no offset reset policy is defined.
        """
        # Lookup any positions for partitions which are awaiting reset (which may be the
        # case if the user called :meth:`seek_to_beginning` or :meth:`seek_to_end`. We do
        # this check first to avoid an unnecessary lookup of committed offsets (which
        # typically occurs when the user is manually assigning partitions and managing
        # their own offsets).
        self._fetcher.reset_offsets_if_needed(partitions)

        if not self._subscription.has_all_fetch_positions():
            # if we still don't have offsets for all partitions, then we should either seek
            # to the last committed position or reset using the auto reset policy
            if (self.config['api_version'] >= (0, 8, 1) and
                    self.config['group_id'] is not None):
                # first refresh commits for all assigned partitions
                self._coordinator.refresh_committed_offsets_if_needed()

            # Then, do any offset lookups in case some positions are not known
            self._fetcher.update_fetch_positions(partitions)

    def _message_generator_v2(self):
        # One-shot generator over a single poll()'s record map; the outer
        # next_v2() re-creates it once it is exhausted.
        timeout_ms = 1000 * (self._consumer_timeout - time.time())
        record_map = self.poll(timeout_ms=timeout_ms, update_offsets=False)
        for tp, records in six.iteritems(record_map):
            # Generators are stateful, and it is possible that the tp / records
            # here may become stale during iteration -- i.e., we seek to a
            # different offset, pause consumption, or lose assignment.
            for record in records:
                # is_fetchable(tp) should handle assignment changes and offset
                # resets; for all other changes (e.g., seeks) we'll rely on the
                # outer function destroying the existing iterator/generator
                # via self._iterator = None
                if not self._subscription.is_fetchable(tp):
                    log.debug("Not returning fetched records for partition %s"
                              " since it is no longer fetchable", tp)
                    break
                self._subscription.assignment[tp].position = record.offset + 1
                yield record

    def _message_generator(self):
        assert self.assignment() or self.subscription() is not None, 'No topic subscription or manual partition assignment'
        while time.time() < self._consumer_timeout:

            self._coordinator.poll()

            # Fetch offsets for any subscribed partitions that we arent tracking yet
            if not self._subscription.has_all_fetch_positions():
                partitions = self._subscription.missing_fetch_positions()
                self._update_fetch_positions(partitions)

            poll_ms = min((1000 * (self._consumer_timeout - time.time())), self.config['retry_backoff_ms'])
            self._client.poll(timeout_ms=poll_ms)

            # after the long poll, we should check whether the group needs to rebalance
            # prior to returning data so that the group can stabilize faster
            if self._coordinator.need_rejoin():
                continue

            # We need to make sure we at least keep up with scheduled tasks,
            # like heartbeats, auto-commits, and metadata refreshes
            timeout_at = self._next_timeout()

            # Short-circuit the fetch iterator if we are already timed out
            # to avoid any unintentional interaction with fetcher setup
            if time.time() > timeout_at:
                continue

            for msg in self._fetcher:
                yield msg
                if time.time() > timeout_at:
                    log.debug("internal iterator timeout - breaking for poll")
                    break
                self._client.poll(timeout_ms=0)

            # An else block on a for loop only executes if there was no break
            # so this should only be called on a StopIteration from the fetcher
            # We assume that it is safe to init_fetches when fetcher is done
            # i.e., there are no more records stored internally
            else:
                self._fetcher.send_fetches()

    def _next_timeout(self):
        # Earliest of: user-facing consumer timeout, metadata TTL expiry, and
        # the coordinator's next scheduled task (all as absolute times).
        timeout = min(self._consumer_timeout,
                      self._client.cluster.ttl() / 1000.0 + time.time(),
                      self._coordinator.time_to_next_poll() + time.time())
        return timeout

    def __iter__(self):  # pylint: disable=non-iterator-returned
        return self

    def __next__(self):
        if self._closed:
            raise StopIteration('KafkaConsumer closed')
        # Now that the heartbeat thread runs in the background
        # there should be no reason to maintain a separate iterator
        # but we'll keep it available for a few releases just in case
        if self.config['legacy_iterator']:
            return self.next_v1()
        else:
            return self.next_v2()

    def next_v2(self):
        self._set_consumer_timeout()
        while time.time() < self._consumer_timeout:
            if not self._iterator:
                self._iterator = self._message_generator_v2()
            try:
                return next(self._iterator)
            except StopIteration:
                # Generator exhausted -- drop it and retry (a fresh generator
                # will be created on the next loop iteration) until the
                # consumer timeout expires.
                self._iterator = None
        raise StopIteration()

    def next_v1(self):
        if not self._iterator:
            self._iterator = self._message_generator()

        self._set_consumer_timeout()
        try:
            return next(self._iterator)
        except StopIteration:
            self._iterator = None
            raise

    def _set_consumer_timeout(self):
        # consumer_timeout_ms can be used to stop iteration early
        if self.config['consumer_timeout_ms'] >= 0:
            self._consumer_timeout = time.time() + (
                self.config['consumer_timeout_ms'] / 1000.0)
diff --git a/testbed/dpkp__kafka-python/kafka/consumer/subscription_state.py b/testbed/dpkp__kafka-python/kafka/consumer/subscription_state.py
new file mode 100644
index 0000000000000000000000000000000000000000..08842d13392e981d886423de5786e11357e2d0d1
--- /dev/null
+++ b/testbed/dpkp__kafka-python/kafka/consumer/subscription_state.py
@@ -0,0 +1,501 @@
from __future__ import absolute_import

import abc
import logging
import re

from kafka.vendor import six

from kafka.errors import IllegalStateError
from kafka.protocol.offset import OffsetResetStrategy
from kafka.structs import OffsetAndMetadata

log = logging.getLogger(__name__)


class SubscriptionState(object):
    """
    A class for tracking the topics, partitions, and offsets for the consumer.
    A partition is "assigned" either directly with assign_from_user() (manual
    assignment) or with assign_from_subscribed() (automatic assignment from
    subscription).

    Once assigned, the partition is not considered "fetchable" until its initial
    position has been set with seek(). Fetchable partitions track a fetch
    position which is used to set the offset of the next fetch, and a consumed
    position which is the last offset that has been returned to the user. You
    can suspend fetching from a partition through pause() without affecting the
    fetched/consumed offsets. The partition will remain unfetchable until the
    resume() is used. You can also query the pause state independently with
    is_paused().

    Note that pause state as well as fetch/consumed positions are not preserved
    when partition assignment is changed whether directly by the user or
    through a group rebalance.

    This class also maintains a cache of the latest commit position for each of
    the assigned partitions. This is updated through committed() and can be used
    to set the initial fetch position (e.g. Fetcher._reset_offset() ).
    """
    _SUBSCRIPTION_EXCEPTION_MESSAGE = (
        "You must choose only one way to configure your consumer:"
        " (1) subscribe to specific topics by name,"
        " (2) subscribe to topics matching a regex pattern,"
        " (3) assign itself specific topic-partitions.")

    # Taken from: https://github.com/apache/kafka/blob/39eb31feaeebfb184d98cc5d94da9148c2319d81/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
    _MAX_NAME_LENGTH = 249
    _TOPIC_LEGAL_CHARS = re.compile('^[a-zA-Z0-9._-]+$')

    def __init__(self, offset_reset_strategy='earliest'):
        """Initialize a SubscriptionState instance

        Keyword Arguments:
            offset_reset_strategy: 'earliest' or 'latest', otherwise
                exception will be raised when fetching an offset that is no
                longer available. Default: 'earliest'
        """
        try:
            offset_reset_strategy = getattr(OffsetResetStrategy,
                                            offset_reset_strategy.upper())
        except AttributeError:
            # Unknown strategy name -- fall back to NONE, which raises on a
            # missing offset rather than resetting.
            log.warning('Unrecognized offset_reset_strategy, using NONE')
            offset_reset_strategy = OffsetResetStrategy.NONE
        self._default_offset_reset_strategy = offset_reset_strategy

        self.subscription = None  # set() or None
        self.subscribed_pattern = None  # regex str or None
        self._group_subscription = set()
        self._user_assignment = set()
        self.assignment = dict()
        self.listener = None

        # initialize to true for the consumers to fetch offset upon starting up
        self.needs_fetch_committed_offsets = True

    def subscribe(self, topics=(), pattern=None, listener=None):
        """Subscribe to a list of topics, or a topic regex pattern.

        Partitions will be dynamically assigned via a group coordinator.
        Topic subscriptions are not incremental: this list will replace the
        current assignment (if there is one).

        This method is incompatible with assign_from_user()

        Arguments:
            topics (list): List of topics for subscription.
            pattern (str): Pattern to match available topics. You must provide
                either topics or pattern, but not both.
            listener (ConsumerRebalanceListener): Optionally include listener
                callback, which will be called before and after each rebalance
                operation.

                As part of group management, the consumer will keep track of the
                list of consumers that belong to a particular group and will
                trigger a rebalance operation if one of the following events
                trigger:

                * Number of partitions change for any of the subscribed topics
                * Topic is created or deleted
                * An existing member of the consumer group dies
                * A new member is added to the consumer group

                When any of these events are triggered, the provided listener
                will be invoked first to indicate that the consumer's assignment
                has been revoked, and then again when the new assignment has
                been received. Note that this listener will immediately override
                any listener set in a previous call to subscribe. It is
                guaranteed, however, that the partitions revoked/assigned
                through this interface are from topics subscribed in this call.
        """
        if self._user_assignment or (topics and pattern):
            raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE)
        assert topics or pattern, 'Must provide topics or pattern'

        if pattern:
            log.info('Subscribing to pattern: /%s/', pattern)
            self.subscription = set()
            self.subscribed_pattern = re.compile(pattern)
        else:
            self.change_subscription(topics)

        # NOTE(review): the listener type check runs *after* the subscription
        # has already been applied, so a TypeError here leaves the new
        # subscription in place.
        if listener and not isinstance(listener, ConsumerRebalanceListener):
            raise TypeError('listener must be a ConsumerRebalanceListener')
        self.listener = listener

    def _ensure_valid_topic_name(self, topic):
        """ Ensures that the topic name is valid according to the kafka source. """

        # See Kafka Source:
        # https://github.com/apache/kafka/blob/39eb31feaeebfb184d98cc5d94da9148c2319d81/clients/src/main/java/org/apache/kafka/common/internals/Topic.java
        if topic is None:
            raise TypeError('All topics must not be None')
        if not isinstance(topic, six.string_types):
            raise TypeError('All topics must be strings')
        if len(topic) == 0:
            raise ValueError('All topics must be non-empty strings')
        if topic == '.' or topic == '..':
            raise ValueError('Topic name cannot be "." or ".."')
        if len(topic) > self._MAX_NAME_LENGTH:
            raise ValueError('Topic name is illegal, it can\'t be longer than {0} characters, topic: "{1}"'.format(self._MAX_NAME_LENGTH, topic))
        if not self._TOPIC_LEGAL_CHARS.match(topic):
            raise ValueError('Topic name "{0}" is illegal, it contains a character other than ASCII alphanumerics, ".", "_" and "-"'.format(topic))

    def change_subscription(self, topics):
        """Change the topic subscription.

        Arguments:
            topics (list of str): topics for subscription

        Raises:
            IllegalStateError: if assign_from_user has been used already
            TypeError: if a topic is None or a non-str
            ValueError: if a topic is an empty string or
                - a topic name is '.' or '..' or
                - a topic name does not consist of ASCII-characters/'-'/'_'/'.'
        """
        if self._user_assignment:
            raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE)

        # Accept a single topic name as a convenience.
        if isinstance(topics, six.string_types):
            topics = [topics]

        if self.subscription == set(topics):
            log.warning("subscription unchanged by change_subscription(%s)",
                        topics)
            return

        for t in topics:
            self._ensure_valid_topic_name(t)

        log.info('Updating subscribed topics to: %s', topics)
        self.subscription = set(topics)
        self._group_subscription.update(topics)

        # Remove any assigned partitions which are no longer subscribed to
        for tp in set(self.assignment.keys()):
            if tp.topic not in self.subscription:
                del self.assignment[tp]

    def group_subscribe(self, topics):
        """Add topics to the current group subscription.

        This is used by the group leader to ensure that it receives metadata
        updates for all topics that any member of the group is subscribed to.

        Arguments:
            topics (list of str): topics to add to the group subscription
        """
        if self._user_assignment:
            raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE)
        self._group_subscription.update(topics)

    def reset_group_subscription(self):
        """Reset the group's subscription to only contain topics subscribed by this consumer."""
        if self._user_assignment:
            raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE)
        assert self.subscription is not None, 'Subscription required'
        self._group_subscription.intersection_update(self.subscription)

    def assign_from_user(self, partitions):
        """Manually assign a list of TopicPartitions to this consumer.

        This interface does not allow for incremental assignment and will
        replace the previous assignment (if there was one).

        Manual topic assignment through this method does not use the consumer's
        group management functionality. As such, there will be no rebalance
        operation triggered when group membership or cluster and topic metadata
        change. Note that it is not possible to use both manual partition
        assignment with assign() and group assignment with subscribe().

        Arguments:
            partitions (list of TopicPartition): assignment for this instance.

        Raises:
            IllegalStateError: if consumer has already called subscribe()
        """
        if self.subscription is not None:
            raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE)

        if self._user_assignment != set(partitions):
            self._user_assignment = set(partitions)

            # Add newly assigned partitions, keeping existing state for
            # partitions that remain assigned...
            for partition in partitions:
                if partition not in self.assignment:
                    self._add_assigned_partition(partition)

            # ...and prune state for partitions no longer in the assignment.
            for tp in set(self.assignment.keys()) - self._user_assignment:
                del self.assignment[tp]

            self.needs_fetch_committed_offsets = True

    def assign_from_subscribed(self, assignments):
        """Update the assignment to the specified partitions

        This method is called by the coordinator to dynamically assign
        partitions based on the consumer's topic subscription. This is different
        from assign_from_user() which directly sets the assignment from a
        user-supplied TopicPartition list.

        Arguments:
            assignments (list of TopicPartition): partitions to assign to this
                consumer instance.
        """
        if not self.partitions_auto_assigned():
            raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE)

        for tp in assignments:
            if tp.topic not in self.subscription:
                raise ValueError("Assigned partition %s for non-subscribed topic." % (tp,))

        # after rebalancing, we always reinitialize the assignment state
        self.assignment.clear()
        for tp in assignments:
            self._add_assigned_partition(tp)
        self.needs_fetch_committed_offsets = True
        log.info("Updated partition assignment: %s", assignments)

    def unsubscribe(self):
        """Clear all topic subscriptions and partition assignments"""
        self.subscription = None
        self._user_assignment.clear()
        self.assignment.clear()
        self.subscribed_pattern = None

    def group_subscription(self):
        """Get the topic subscription for the group.

        For the leader, this will include the union of all member subscriptions.
        For followers, it is the member's subscription only.

        This is used when querying topic metadata to detect metadata changes
        that would require rebalancing (the leader fetches metadata for all
        topics in the group so that it can do partition assignment).

        Returns:
            set: topics
        """
        return self._group_subscription

    def seek(self, partition, offset):
        """Manually specify the fetch offset for a TopicPartition.

        Overrides the fetch offsets that the consumer will use on the next
        poll(). If this API is invoked for the same partition more than once,
        the latest offset will be used on the next poll(). Note that you may
        lose data if this API is arbitrarily used in the middle of consumption,
        to reset the fetch offsets.

        Arguments:
            partition (TopicPartition): partition for seek operation
            offset (int): message offset in partition
        """
        self.assignment[partition].seek(offset)

    def assigned_partitions(self):
        """Return set of TopicPartitions in current assignment."""
        return set(self.assignment.keys())

    def paused_partitions(self):
        """Return current set of paused TopicPartitions."""
        return set(partition for partition in self.assignment
                   if self.is_paused(partition))

    def fetchable_partitions(self):
        """Return set of TopicPartitions that should be Fetched."""
        fetchable = set()
        for partition, state in six.iteritems(self.assignment):
            if state.is_fetchable():
                fetchable.add(partition)
        return fetchable

    def partitions_auto_assigned(self):
        """Return True unless user supplied partitions manually."""
        return self.subscription is not None

    def all_consumed_offsets(self):
        """Returns consumed offsets as {TopicPartition: OffsetAndMetadata}"""
        all_consumed = {}
        for partition, state in six.iteritems(self.assignment):
            if state.has_valid_position:
                all_consumed[partition] = OffsetAndMetadata(state.position, '')
        return all_consumed

    def need_offset_reset(self, partition, offset_reset_strategy=None):
        """Mark partition for offset reset using specified or default strategy.

        Arguments:
            partition (TopicPartition): partition to mark
            offset_reset_strategy (OffsetResetStrategy, optional)
        """
        if offset_reset_strategy is None:
            offset_reset_strategy = self._default_offset_reset_strategy
        self.assignment[partition].await_reset(offset_reset_strategy)

    def has_default_offset_reset_policy(self):
        """Return True if default offset reset policy is Earliest or Latest"""
        return self._default_offset_reset_strategy != OffsetResetStrategy.NONE

    def is_offset_reset_needed(self, partition):
        # True when the partition has been flagged for reset via await_reset().
        return self.assignment[partition].awaiting_reset

    def has_all_fetch_positions(self):
        # True only when every assigned partition has a valid fetch position.
        for state in self.assignment.values():
            if not state.has_valid_position:
                return False
        return True

    def missing_fetch_positions(self):
        # Set of assigned partitions that still need a fetch position.
        missing = set()
        for partition, state in six.iteritems(self.assignment):
            if not state.has_valid_position:
                missing.add(partition)
        return missing

    def is_assigned(self, partition):
        return partition in self.assignment

    def is_paused(self, partition):
        return partition in self.assignment and self.assignment[partition].paused

    def is_fetchable(self, partition):
        return partition in self.assignment and self.assignment[partition].is_fetchable()

    def pause(self, partition):
        self.assignment[partition].pause()

    def resume(self, partition):
        self.assignment[partition].resume()

    def _add_assigned_partition(self, partition):
        # Fresh per-partition state: no position, not paused, not fetchable.
        self.assignment[partition] = TopicPartitionState()


class TopicPartitionState(object):
    def __init__(self):
        self.committed = None  # last committed OffsetAndMetadata
        self.has_valid_position = False  # whether we have valid position
        self.paused = False  # whether this partition has been paused by the user
        self.awaiting_reset = False  # whether we are awaiting reset
        self.reset_strategy = None  # the reset strategy if awaitingReset
is set + self._position = None # offset exposed to the user + self.highwater = None + self.drop_pending_message_set = False + # The last message offset hint available from a message batch with + # magic=2 which includes deleted compacted messages + self.last_offset_from_message_batch = None + + def _set_position(self, offset): + assert self.has_valid_position, 'Valid position required' + self._position = offset + + def _get_position(self): + return self._position + + position = property(_get_position, _set_position, None, "last position") + + def await_reset(self, strategy): + self.awaiting_reset = True + self.reset_strategy = strategy + self._position = None + self.last_offset_from_message_batch = None + self.has_valid_position = False + + def seek(self, offset): + self._position = offset + self.awaiting_reset = False + self.reset_strategy = None + self.has_valid_position = True + self.drop_pending_message_set = True + self.last_offset_from_message_batch = None + + def pause(self): + self.paused = True + + def resume(self): + self.paused = False + + def is_fetchable(self): + return not self.paused and self.has_valid_position + + +class ConsumerRebalanceListener(object): + """ + A callback interface that the user can implement to trigger custom actions + when the set of partitions assigned to the consumer changes. + + This is applicable when the consumer is having Kafka auto-manage group + membership. If the consumer's directly assign partitions, those + partitions will never be reassigned and this callback is not applicable. + + When Kafka is managing the group membership, a partition re-assignment will + be triggered any time the members of the group changes or the subscription + of the members changes. This can occur when processes die, new process + instances are added or old instances come back to life after failure. + Rebalances can also be triggered by changes affecting the subscribed + topics (e.g. 
class ConsumerRebalanceListener(object):
    """Callback interface for custom actions when partition assignment changes.

    This applies only when Kafka auto-manages group membership. If the
    consumer directly assigns partitions, those partitions are never
    reassigned and this callback does not apply.

    When Kafka manages the group, a partition re-assignment is triggered any
    time group membership or member subscriptions change: processes dying,
    new instances joining, old instances recovering after failure, or
    changes to the subscribed topics themselves (e.g. when the number of
    partitions is administratively adjusted).

    One common use is saving offsets in a custom store: committing from
    on_partitions_revoked() guarantees offsets are saved whenever the
    assignment changes. Another is flushing caches of intermediate results
    -- e.g. a consumer tallying per-user page views (with the topic
    partitioned by user id) may keep counts in memory and flush to a remote
    store only occasionally; a reassignment should trigger a flush before
    the new owner takes over consumption.

    These callbacks execute in the user thread as part of Consumer.poll()
    whenever the assignment changes.

    It is guaranteed that every consumer process invokes
    on_partitions_revoked() before any process invokes
    on_partitions_assigned(); state saved in the former is therefore
    available by the time the partition's new owner runs the latter.
    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def on_partitions_revoked(self, revoked):
        """Handle offset commits at the start of a rebalance.

        Called before a rebalance operation starts and after the consumer
        stops fetching data. Committing offsets here -- to Kafka or a
        custom offset store -- prevents duplicate data.

        NOTE: only called before rebalances; not prior to
        KafkaConsumer.close().

        Arguments:
            revoked (list of TopicPartition): the partitions that were
                assigned to the consumer on the last rebalance
        """
        pass

    @abc.abstractmethod
    def on_partitions_assigned(self, assigned):
        """Handle customized offsets after a successful re-assignment.

        Called after an offset re-assignment completes and before the
        consumer starts fetching data. Every group member runs its
        on_partitions_revoked() callback before any member runs
        on_partitions_assigned().

        Arguments:
            assigned (list of TopicPartition): the partitions assigned to
                the consumer (may include previously assigned partitions)
        """
        pass
log = logging.getLogger(__name__)


class AbstractPartitionAssignor(object):
    """Base interface for partition assignors.

    Concrete assignors implement the leader-side partition assignment and
    the per-member metadata advertised when joining the group (in
    particular, collecting the partition counts every assignor needs).
    """

    @abc.abstractproperty
    def name(self):
        """A short string uniquely identifying this assignor."""
        pass

    @abc.abstractmethod
    def assign(self, cluster, members):
        """Perform group assignment from cluster metadata and subscriptions.

        Arguments:
            cluster (ClusterMetadata): metadata for use in assignment
            members (dict of {member_id: MemberMetadata}): decoded metadata
                for each member in the group.

        Returns:
            dict: {member_id: MemberAssignment}
        """
        pass

    @abc.abstractmethod
    def metadata(self, topics):
        """Generate ProtocolMetadata to submit via JoinGroupRequest.

        Arguments:
            topics (set): a member's subscribed topics

        Returns:
            MemberMetadata struct
        """
        pass

    @abc.abstractmethod
    def on_assignment(self, assignment):
        """Callback invoked on each assignment.

        May be used to update any internal state the assignor keeps.

        Arguments:
            assignment (MemberAssignment): the member's assignment
        """
        pass
+ + The assignment will be: + C0: [t0p0, t0p1, t1p0, t1p1] + C1: [t0p2, t1p2] + """ + name = 'range' + version = 0 + + @classmethod + def assign(cls, cluster, member_metadata): + consumers_per_topic = collections.defaultdict(list) + for member, metadata in six.iteritems(member_metadata): + for topic in metadata.subscription: + consumers_per_topic[topic].append(member) + + # construct {member_id: {topic: [partition, ...]}} + assignment = collections.defaultdict(dict) + + for topic, consumers_for_topic in six.iteritems(consumers_per_topic): + partitions = cluster.partitions_for_topic(topic) + if partitions is None: + log.warning('No partition metadata for topic %s', topic) + continue + partitions = sorted(partitions) + consumers_for_topic.sort() + + partitions_per_consumer = len(partitions) // len(consumers_for_topic) + consumers_with_extra = len(partitions) % len(consumers_for_topic) + + for i, member in enumerate(consumers_for_topic): + start = partitions_per_consumer * i + start += min(i, consumers_with_extra) + length = partitions_per_consumer + if not i + 1 > consumers_with_extra: + length += 1 + assignment[member][topic] = partitions[start:start+length] + + protocol_assignment = {} + for member_id in member_metadata: + protocol_assignment[member_id] = ConsumerProtocolMemberAssignment( + cls.version, + sorted(assignment[member_id].items()), + b'') + return protocol_assignment + + @classmethod + def metadata(cls, topics): + return ConsumerProtocolMemberMetadata(cls.version, list(topics), b'') + + @classmethod + def on_assignment(cls, assignment): + pass diff --git a/testbed/dpkp__kafka-python/kafka/coordinator/assignors/roundrobin.py b/testbed/dpkp__kafka-python/kafka/coordinator/assignors/roundrobin.py new file mode 100644 index 0000000000000000000000000000000000000000..2d24a5c8be67dfd32e147b652f9eca344b903849 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/coordinator/assignors/roundrobin.py @@ -0,0 +1,96 @@ +from __future__ import absolute_import + 
class RoundRobinPartitionAssignor(AbstractPartitionAssignor):
    """Round-robin assignment over all partitions and consumers.

    All available partitions and consumers are laid out, and partitions are
    handed to consumers in round-robin order. When every consumer has an
    identical subscription, partition ownership counts end up within a
    delta of exactly one across consumers.

    For example, with consumers C0 and C1, topics t0 and t1 each having 3
    partitions (t0p0, t0p1, t0p2, t1p0, t1p1, t1p2), the assignment is:
        C0: [t0p0, t0p2, t1p1]
        C1: [t0p1, t1p0, t1p2]

    When subscriptions differ across instances, each consumer is still
    considered in round-robin fashion but is skipped for topics it is not
    subscribed to, which can produce imbalanced assignments. For example,
    with consumers C0, C1, C2 and topics t0, t1, t2 having 1, 2 and 3
    partitions respectively, where C0 subscribes to t0; C1 to t0, t1; and
    C2 to t0, t1, t2, the assignment is:
        C0: [t0p0]
        C1: [t1p0]
        C2: [t1p1, t2p0, t2p1, t2p2]
    """
    name = 'roundrobin'
    version = 0

    @classmethod
    def assign(cls, cluster, member_metadata):
        # Union of every member's subscribed topics.
        all_topics = set()
        for metadata in member_metadata.values():
            all_topics.update(metadata.subscription)

        # Expand topics into a sorted list of TopicPartitions.
        all_topic_partitions = []
        for topic in all_topics:
            partitions = cluster.partitions_for_topic(topic)
            if partitions is None:
                log.warning('No partition metadata for topic %s', topic)
                continue
            all_topic_partitions.extend(
                TopicPartition(topic, partition) for partition in partitions)
        all_topic_partitions.sort()

        # {member_id: {topic: [partition, ...]}}
        assignment = collections.defaultdict(lambda: collections.defaultdict(list))

        member_cycle = itertools.cycle(sorted(member_metadata.keys()))
        for tp in all_topic_partitions:
            member_id = next(member_cycle)

            # all_topic_partitions was built from the members' subscribed
            # topics, so every topic is in at least one member subscription;
            # otherwise this skip-loop could spin forever.
            while tp.topic not in member_metadata[member_id].subscription:
                member_id = next(member_cycle)
            assignment[member_id][tp.topic].append(tp.partition)

        return {
            member_id: ConsumerProtocolMemberAssignment(
                cls.version, sorted(assignment[member_id].items()), b'')
            for member_id in member_metadata
        }

    @classmethod
    def metadata(cls, topics):
        return ConsumerProtocolMemberMetadata(cls.version, list(topics), b'')

    @classmethod
    def on_assignment(cls, assignment):
        pass
import logging
from collections import defaultdict, namedtuple
from copy import deepcopy

log = logging.getLogger(__name__)


# A pair of Kafka consumer ids involved in a partition reassignment: the
# partition (or some partition of the topic) moved from the source consumer
# to the destination consumer during the rebalance. These pairs form a
# directed graph used to detect reassignment cycles among consumers.
ConsumerPair = namedtuple("ConsumerPair", ["src_member_id", "dst_member_id"])


def is_sublist(source, target):
    """Check whether *target* occurs as a contiguous run inside *source*.

    Arguments:
        source: the list in which to search for the occurrence of target.
        target: the tuple to search for as a sublist of source

    Returns:
        True if target is in source; False otherwise
    """
    for start, element in enumerate(source):
        if element != target[0]:
            continue
        if tuple(source[start:start + len(target)]) == target:
            return True
    return False


class PartitionMovements:
    """Bookkeeping that simplifies lookup of partition movements.

    During a rebalance this tracks, per topic, the movements that occurred
    (as ConsumerPair keys) and, per partition, its current movement, so the
    sticky assignor can prefer reversing prior movements and can verify
    that no movement cycles (stickiness violations) were produced.
    """

    def __init__(self):
        # {topic: {ConsumerPair: set(TopicPartition)}}
        self.partition_movements_by_topic = defaultdict(
            lambda: defaultdict(set)
        )
        # {TopicPartition: ConsumerPair}
        self.partition_movements = {}

    def move_partition(self, partition, old_consumer, new_consumer):
        if partition in self.partition_movements:
            # The partition already moved once: collapse both hops into a
            # single movement from its original owner.
            existing_pair = self._remove_movement_record_of_partition(partition)
            assert existing_pair.dst_member_id == old_consumer
            if existing_pair.src_member_id != new_consumer:
                self._add_partition_movement_record(
                    partition,
                    ConsumerPair(src_member_id=existing_pair.src_member_id,
                                 dst_member_id=new_consumer),
                )
            # else: the partition returned to its original owner, so no
            # movement record remains.
        else:
            self._add_partition_movement_record(
                partition,
                ConsumerPair(src_member_id=old_consumer,
                             dst_member_id=new_consumer),
            )

    def get_partition_to_be_moved(self, partition, old_consumer, new_consumer):
        if partition.topic not in self.partition_movements_by_topic:
            return partition
        if partition in self.partition_movements:
            # This partition moved before; treat its original owner as the
            # effective source of the move.
            assert old_consumer == self.partition_movements[partition].dst_member_id
            old_consumer = self.partition_movements[partition].src_member_id
        reverse_pair = ConsumerPair(src_member_id=new_consumer,
                                    dst_member_id=old_consumer)
        if reverse_pair not in self.partition_movements_by_topic[partition.topic]:
            return partition
        # Prefer a partition whose move this would exactly reverse.
        return next(iter(self.partition_movements_by_topic[partition.topic][reverse_pair]))

    def are_sticky(self):
        for topic, movements in self.partition_movements_by_topic.items():
            movement_pairs = set(movements.keys())
            if self._has_cycles(movement_pairs):
                log.error(
                    "Stickiness is violated for topic {}\n"
                    "Partition movements for this topic occurred among the following consumer pairs:\n"
                    "{}".format(topic, movement_pairs)
                )
                return False
        return True

    def _remove_movement_record_of_partition(self, partition):
        pair = self.partition_movements.pop(partition)

        topic_movements = self.partition_movements_by_topic[partition.topic]
        topic_movements[pair].remove(partition)
        if not topic_movements[pair]:
            del topic_movements[pair]
        if not self.partition_movements_by_topic[partition.topic]:
            del self.partition_movements_by_topic[partition.topic]

        return pair

    def _add_partition_movement_record(self, partition, pair):
        self.partition_movements[partition] = pair
        self.partition_movements_by_topic[partition.topic][pair].add(partition)

    def _has_cycles(self, consumer_pairs):
        cycles = set()
        for pair in consumer_pairs:
            reduced_pairs = deepcopy(consumer_pairs)
            reduced_pairs.remove(pair)
            path = [pair.src_member_id]
            if (self._is_linked(pair.dst_member_id, pair.src_member_id, reduced_pairs, path)
                    and not self._is_subcycle(path, cycles)):
                cycles.add(tuple(path))
                log.error("A cycle of length {} was found: {}".format(len(path) - 1, path))

        # Only partition movements of the same topic between a pair of
        # consumers are flagged for now: cycles among more than two
        # consumers are rare enough (per randomized testing of the sticky
        # algorithm) that handling them is not worth the added complexity.
        for cycle in cycles:
            if len(cycle) == 3:  # a 3-node path indicates a cycle of length 2
                return True
        return False

    @staticmethod
    def _is_subcycle(cycle, cycles):
        # A rotation of `cycle` is a sublist of cycle[:-1] + cycle.
        super_cycle = deepcopy(cycle)
        super_cycle = super_cycle[:-1]
        super_cycle.extend(cycle)
        for found_cycle in cycles:
            if len(found_cycle) == len(cycle) and is_sublist(super_cycle, found_cycle):
                return True
        return False

    def _is_linked(self, src, dst, pairs, current_path):
        # Depth-first search for a path src -> ... -> dst through `pairs`,
        # appending visited nodes to current_path.
        if src == dst:
            return False
        if not pairs:
            return False
        if ConsumerPair(src, dst) in pairs:
            current_path.append(src)
            current_path.append(dst)
            return True
        for pair in pairs:
            if pair.src_member_id == src:
                reduced_set = deepcopy(pairs)
                reduced_set.remove(pair)
                current_path.append(pair.src_member_id)
                return self._is_linked(pair.dst_member_id, dst, reduced_set, current_path)
        return False
pop_last(self): + value = self.last() + self._set.remove(value) + self._cached_last = None + return value + + def add(self, value): + if self._cached_last is not None and self._key(value) > self._key(self._cached_last): + self._cached_last = value + if self._cached_first is not None and self._key(value) < self._key(self._cached_first): + self._cached_first = value + + return self._set.add(value) + + def remove(self, value): + if self._cached_last is not None and self._cached_last == value: + self._cached_last = None + if self._cached_first is not None and self._cached_first == value: + self._cached_first = None + + return self._set.remove(value) + + def __contains__(self, value): + return value in self._set + + def __iter__(self): + return iter(sorted(self._set, key=self._key)) + + def _bool(self): + return len(self._set) != 0 + + __nonzero__ = _bool + __bool__ = _bool diff --git a/testbed/dpkp__kafka-python/kafka/coordinator/assignors/sticky/sticky_assignor.py b/testbed/dpkp__kafka-python/kafka/coordinator/assignors/sticky/sticky_assignor.py new file mode 100644 index 0000000000000000000000000000000000000000..dce714f1a23f464d25da688b713df8b89ef6ad92 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/coordinator/assignors/sticky/sticky_assignor.py @@ -0,0 +1,685 @@ +import logging +from collections import defaultdict, namedtuple +from copy import deepcopy + +from kafka.cluster import ClusterMetadata +from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor +from kafka.coordinator.assignors.sticky.partition_movements import PartitionMovements +from kafka.coordinator.assignors.sticky.sorted_set import SortedSet +from kafka.coordinator.protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment +from kafka.coordinator.protocol import Schema +from kafka.protocol.struct import Struct +from kafka.protocol.types import String, Array, Int32 +from kafka.structs import TopicPartition +from kafka.vendor import six + +log = 
logging.getLogger(__name__) + +ConsumerGenerationPair = namedtuple("ConsumerGenerationPair", ["consumer", "generation"]) + + +def has_identical_list_elements(list_): + """Checks if all lists in the collection have the same members + + Arguments: + list_: collection of lists + + Returns: + true if all lists in the collection have the same members; false otherwise + """ + if not list_: + return True + for i in range(1, len(list_)): + if list_[i] != list_[i - 1]: + return False + return True + + +def subscriptions_comparator_key(element): + return len(element[1]), element[0] + + +def partitions_comparator_key(element): + return len(element[1]), element[0].topic, element[0].partition + + +def remove_if_present(collection, element): + try: + collection.remove(element) + except (ValueError, KeyError): + pass + + +StickyAssignorMemberMetadataV1 = namedtuple("StickyAssignorMemberMetadataV1", + ["subscription", "partitions", "generation"]) + + +class StickyAssignorUserDataV1(Struct): + """ + Used for preserving consumer's previously assigned partitions + list and sending it as user data to the leader during a rebalance + """ + + SCHEMA = Schema( + ("previous_assignment", Array(("topic", String("utf-8")), ("partitions", Array(Int32)))), ("generation", Int32) + ) + + +class StickyAssignmentExecutor: + def __init__(self, cluster, members): + self.members = members + # a mapping between consumers and their assigned partitions that is updated during assignment procedure + self.current_assignment = defaultdict(list) + # an assignment from a previous generation + self.previous_assignment = {} + # a mapping between partitions and their assigned consumers + self.current_partition_consumer = {} + # a flag indicating that there were no previous assignments performed ever + self.is_fresh_assignment = False + # a mapping of all topic partitions to all consumers that can be assigned to them + self.partition_to_all_potential_consumers = {} + # a mapping of all consumers to all potential 
topic partitions that can be assigned to them + self.consumer_to_all_potential_partitions = {} + # an ascending sorted set of consumers based on how many topic partitions are already assigned to them + self.sorted_current_subscriptions = SortedSet() + # an ascending sorted list of topic partitions based on how many consumers can potentially use them + self.sorted_partitions = [] + # all partitions that need to be assigned + self.unassigned_partitions = [] + # a flag indicating that a certain partition cannot remain assigned to its current consumer because the consumer + # is no longer subscribed to its topic + self.revocation_required = False + + self.partition_movements = PartitionMovements() + self._initialize(cluster) + + def perform_initial_assignment(self): + self._populate_sorted_partitions() + self._populate_partitions_to_reassign() + + def balance(self): + self._initialize_current_subscriptions() + initializing = len(self.current_assignment[self._get_consumer_with_most_subscriptions()]) == 0 + + # assign all unassigned partitions + for partition in self.unassigned_partitions: + # skip if there is no potential consumer for the partition + if not self.partition_to_all_potential_consumers[partition]: + continue + self._assign_partition(partition) + + # narrow down the reassignment scope to only those partitions that can actually be reassigned + fixed_partitions = set() + for partition in six.iterkeys(self.partition_to_all_potential_consumers): + if not self._can_partition_participate_in_reassignment(partition): + fixed_partitions.add(partition) + for fixed_partition in fixed_partitions: + remove_if_present(self.sorted_partitions, fixed_partition) + remove_if_present(self.unassigned_partitions, fixed_partition) + + # narrow down the reassignment scope to only those consumers that are subject to reassignment + fixed_assignments = {} + for consumer in six.iterkeys(self.consumer_to_all_potential_partitions): + if not 
self._can_consumer_participate_in_reassignment(consumer): + self._remove_consumer_from_current_subscriptions_and_maintain_order(consumer) + fixed_assignments[consumer] = self.current_assignment[consumer] + del self.current_assignment[consumer] + + # create a deep copy of the current assignment so we can revert to it + # if we do not get a more balanced assignment later + prebalance_assignment = deepcopy(self.current_assignment) + prebalance_partition_consumers = deepcopy(self.current_partition_consumer) + + # if we don't already need to revoke something due to subscription changes, + # first try to balance by only moving newly added partitions + if not self.revocation_required: + self._perform_reassignments(self.unassigned_partitions) + reassignment_performed = self._perform_reassignments(self.sorted_partitions) + + # if we are not preserving existing assignments and we have made changes to the current assignment + # make sure we are getting a more balanced assignment; otherwise, revert to previous assignment + if ( + not initializing + and reassignment_performed + and self._get_balance_score(self.current_assignment) >= self._get_balance_score(prebalance_assignment) + ): + self.current_assignment = prebalance_assignment + self.current_partition_consumer.clear() + self.current_partition_consumer.update(prebalance_partition_consumers) + + # add the fixed assignments (those that could not change) back + for consumer, partitions in six.iteritems(fixed_assignments): + self.current_assignment[consumer] = partitions + self._add_consumer_to_current_subscriptions_and_maintain_order(consumer) + + def get_final_assignment(self, member_id): + assignment = defaultdict(list) + for topic_partition in self.current_assignment[member_id]: + assignment[topic_partition.topic].append(topic_partition.partition) + assignment = {k: sorted(v) for k, v in six.iteritems(assignment)} + return six.viewitems(assignment) + + def _initialize(self, cluster): + 
self._init_current_assignments(self.members) + + for topic in cluster.topics(): + partitions = cluster.partitions_for_topic(topic) + if partitions is None: + log.warning("No partition metadata for topic %s", topic) + continue + for p in partitions: + partition = TopicPartition(topic=topic, partition=p) + self.partition_to_all_potential_consumers[partition] = [] + for consumer_id, member_metadata in six.iteritems(self.members): + self.consumer_to_all_potential_partitions[consumer_id] = [] + for topic in member_metadata.subscription: + if cluster.partitions_for_topic(topic) is None: + log.warning("No partition metadata for topic {}".format(topic)) + continue + for p in cluster.partitions_for_topic(topic): + partition = TopicPartition(topic=topic, partition=p) + self.consumer_to_all_potential_partitions[consumer_id].append(partition) + self.partition_to_all_potential_consumers[partition].append(consumer_id) + if consumer_id not in self.current_assignment: + self.current_assignment[consumer_id] = [] + + def _init_current_assignments(self, members): + # we need to process subscriptions' user data with each consumer's reported generation in mind + # higher generations overwrite lower generations in case of a conflict + # note that a conflict could exists only if user data is for different generations + + # for each partition we create a map of its consumers by generation + sorted_partition_consumers_by_generation = {} + for consumer, member_metadata in six.iteritems(members): + for partitions in member_metadata.partitions: + if partitions in sorted_partition_consumers_by_generation: + consumers = sorted_partition_consumers_by_generation[partitions] + if member_metadata.generation and member_metadata.generation in consumers: + # same partition is assigned to two consumers during the same rebalance. 
def _init_current_assignments(self, members):
    """Reconstruct the current assignment from members' reported (generation, partitions) user data."""
    # we need to process subscriptions' user data with each consumer's reported generation in mind:
    # higher generations overwrite lower generations in case of a conflict
    # (a conflict could exist only if user data is for different generations)

    # for each partition, map the consumers that claimed it, keyed by generation
    claims_by_partition = {}
    for consumer, member_metadata in members.items():
        for tp in member_metadata.partitions:
            claims = claims_by_partition.setdefault(tp, {})
            if member_metadata.generation and member_metadata.generation in claims:
                # same partition is assigned to two consumers during the same rebalance.
                # log a warning and skip this record
                log.warning(
                    "Partition {} is assigned to multiple consumers "
                    "following sticky assignment generation {}.".format(tp, member_metadata.generation)
                )
            else:
                claims[member_metadata.generation] = consumer

    # the newest generation owns the partition now; the generation before it (if any)
    # is remembered in previous_assignment so reassignment can prefer stickiness
    for tp, claims in claims_by_partition.items():
        generations = sorted(claims, reverse=True)
        self.current_assignment[claims[generations[0]]].append(tp)
        if len(generations) > 1:
            self.previous_assignment[tp] = ConsumerGenerationPair(
                consumer=claims[generations[1]], generation=generations[1]
            )

    self.is_fresh_assignment = len(self.current_assignment) == 0

    for consumer_id, partitions in self.current_assignment.items():
        for tp in partitions:
            self.current_partition_consumer[tp] = consumer_id


def _are_subscriptions_identical(self):
    """
    Returns:
        true, if both potential consumers of partitions and potential partitions that consumers can
        consume are the same
    """
    consumers_per_partition = list(self.partition_to_all_potential_consumers.values())
    partitions_per_consumer = list(self.consumer_to_all_potential_partitions.values())
    return (has_identical_list_elements(consumers_per_partition)
            and has_identical_list_elements(partitions_per_consumer))
    def _populate_sorted_partitions(self):
        """Build self.sorted_partitions: every partition, ordered so the best reassignment
        candidates come first (fewest potential consumers first in the generic case)."""
        # set of topic partitions with their respective potential consumers
        all_partitions = set((tp, tuple(consumers))
                             for tp, consumers in six.iteritems(self.partition_to_all_potential_consumers))
        partitions_sorted_by_num_of_potential_consumers = sorted(all_partitions, key=partitions_comparator_key)

        self.sorted_partitions = []
        if not self.is_fresh_assignment and self._are_subscriptions_identical():
            # if this is a reassignment and the subscriptions are identical (all consumers can consume
            # from all topics) then we just need to simply list partitions in a round robin fashion
            # (from consumers with most assigned partitions to those with least)
            assignments = deepcopy(self.current_assignment)
            for consumer_id, partitions in six.iteritems(assignments):
                # drop partitions that no longer exist in the cluster metadata
                to_remove = []
                for partition in partitions:
                    if partition not in self.partition_to_all_potential_consumers:
                        to_remove.append(partition)
                for partition in to_remove:
                    partitions.remove(partition)

            sorted_consumers = SortedSet(
                iterable=[(consumer, tuple(partitions)) for consumer, partitions in six.iteritems(assignments)],
                key=subscriptions_comparator_key,
            )
            # at this point, sorted_consumers contains an ascending-sorted list of consumers based on
            # how many valid partitions are currently assigned to them
            while sorted_consumers:
                # take the consumer with the most partitions
                consumer, _ = sorted_consumers.pop_last()
                # currently assigned partitions to this consumer
                remaining_partitions = assignments[consumer]
                # from partitions that had a different consumer before,
                # keep only those that are assigned to this consumer now
                previous_partitions = set(six.iterkeys(self.previous_assignment)).intersection(set(remaining_partitions))
                if previous_partitions:
                    # if there is a partition of this consumer that was assigned to another consumer before
                    # mark it as a good option for reassignment
                    partition = previous_partitions.pop()
                    remaining_partitions.remove(partition)
                    self.sorted_partitions.append(partition)
                    sorted_consumers.add((consumer, tuple(assignments[consumer])))
                elif remaining_partitions:
                    # otherwise, mark any other one of the current partitions as a reassignment candidate
                    self.sorted_partitions.append(remaining_partitions.pop())
                    sorted_consumers.add((consumer, tuple(assignments[consumer])))

            # append whatever was never assigned to anyone, cheapest-to-place first
            while partitions_sorted_by_num_of_potential_consumers:
                partition = partitions_sorted_by_num_of_potential_consumers.pop(0)[0]
                if partition not in self.sorted_partitions:
                    self.sorted_partitions.append(partition)
        else:
            # fresh assignment or non-identical subscriptions: plain fewest-potential-consumers order
            while partitions_sorted_by_num_of_potential_consumers:
                self.sorted_partitions.append(partitions_sorted_by_num_of_potential_consumers.pop(0)[0])

    def _populate_partitions_to_reassign(self):
        """Compute self.unassigned_partitions and prune current assignments that are no longer valid."""
        self.unassigned_partitions = deepcopy(self.sorted_partitions)

        assignments_to_remove = []
        for consumer_id, partitions in six.iteritems(self.current_assignment):
            if consumer_id not in self.members:
                # if a consumer that existed before (and had some partition assignments) is now removed,
                # remove it from current_assignment
                for partition in partitions:
                    del self.current_partition_consumer[partition]
                assignments_to_remove.append(consumer_id)
            else:
                # otherwise (the consumer still exists)
                partitions_to_remove = []
                for partition in partitions:
                    if partition not in self.partition_to_all_potential_consumers:
                        # if this topic partition of this consumer no longer exists
                        # remove it from current_assignment of the consumer
                        partitions_to_remove.append(partition)
                    elif partition.topic not in self.members[consumer_id].subscription:
                        # if this partition cannot remain assigned to its current consumer because the consumer
                        # is no longer subscribed to its topic remove it from current_assignment of the consumer
                        partitions_to_remove.append(partition)
                        self.revocation_required = True
                    else:
                        # otherwise, remove the topic partition from those that need to be assigned only if
                        # its current consumer is still subscribed to its topic (because it is already assigned
                        # and we would want to preserve that assignment as much as possible)
                        self.unassigned_partitions.remove(partition)
                for partition in partitions_to_remove:
                    self.current_assignment[consumer_id].remove(partition)
                    del self.current_partition_consumer[partition]
        for consumer_id in assignments_to_remove:
            del self.current_assignment[consumer_id]
def _initialize_current_subscriptions(self):
    """(Re)build the sorted set of consumers ordered by current assignment size."""
    self.sorted_current_subscriptions = SortedSet(
        iterable=[(consumer, tuple(partitions)) for consumer, partitions in self.current_assignment.items()],
        key=subscriptions_comparator_key,
    )


def _get_consumer_with_least_subscriptions(self):
    """Consumer currently holding the fewest partitions."""
    return self.sorted_current_subscriptions.first()[0]


def _get_consumer_with_most_subscriptions(self):
    """Consumer currently holding the most partitions."""
    return self.sorted_current_subscriptions.last()[0]


def _remove_consumer_from_current_subscriptions_and_maintain_order(self, consumer):
    # must be called BEFORE mutating the consumer's assignment so the stored key matches
    entry = (consumer, tuple(self.current_assignment[consumer]))
    self.sorted_current_subscriptions.remove(entry)


def _add_consumer_to_current_subscriptions_and_maintain_order(self, consumer):
    # re-insert with the consumer's (possibly updated) assignment as the key
    entry = (consumer, tuple(self.current_assignment[consumer]))
    self.sorted_current_subscriptions.add(entry)


def _is_balanced(self):
    """Determines if the current assignment is a balanced one"""
    fewest = len(self.current_assignment[self._get_consumer_with_least_subscriptions()])
    most = len(self.current_assignment[self._get_consumer_with_most_subscriptions()])
    if fewest >= most - 1:
        # minimum and maximum numbers of partitions assigned to consumers differ by at most one
        return True

    # map each assigned partition to the consumer that currently owns it
    all_assigned_partitions = {}
    for consumer_id, consumer_partitions in self.current_assignment.items():
        for partition in consumer_partitions:
            if partition in all_assigned_partitions:
                log.error("{} is assigned to more than one consumer.".format(partition))
            all_assigned_partitions[partition] = consumer_id

    # for each consumer that has not reached its capacity, make sure none of its
    # potential-but-unowned partitions could be taken from a strictly larger owner
    # (that would mean the assignment is not balanced)
    for consumer, _ in self.sorted_current_subscriptions:
        owned = self.current_assignment[consumer]
        if len(owned) == len(self.consumer_to_all_potential_partitions[consumer]):
            # this consumer already has all the topic partitions it can get
            continue
        for partition in self.consumer_to_all_potential_partitions[consumer]:
            if partition not in owned:
                owner = all_assigned_partitions[partition]
                if len(owned) < len(self.current_assignment[owner]):
                    return False
    return True
def _assign_partition(self, partition):
    """Give the partition to the least-loaded consumer that is able to own it."""
    for consumer, _ in self.sorted_current_subscriptions:
        if partition not in self.consumer_to_all_potential_partitions[consumer]:
            continue
        # remove/re-add around the mutation so the sorted order stays correct
        self._remove_consumer_from_current_subscriptions_and_maintain_order(consumer)
        self.current_assignment[consumer].append(partition)
        self.current_partition_consumer[partition] = consumer
        self._add_consumer_to_current_subscriptions_and_maintain_order(consumer)
        break


def _can_partition_participate_in_reassignment(self, partition):
    """A partition is movable only when more than one consumer could own it."""
    potential_owners = self.partition_to_all_potential_consumers[partition]
    return len(potential_owners) > 1


def _can_consumer_participate_in_reassignment(self, consumer):
    """A consumer may take part when it can still gain partitions or holds a movable one."""
    owned = self.current_assignment[consumer]
    capacity = len(self.consumer_to_all_potential_partitions[consumer])
    if len(owned) > capacity:
        log.error("The consumer {} is assigned more partitions than the maximum possible.".format(consumer))
    if len(owned) < capacity:
        # if a consumer is not assigned all its potential partitions it is subject to reassignment
        return True
    # otherwise it is subject to reassignment only if one of its partitions could move elsewhere
    return any(self._can_partition_participate_in_reassignment(p) for p in owned)
    def _perform_reassignments(self, reassignable_partitions):
        """Repeatedly move partitions between consumers until no further move improves balance.

        Arguments:
            reassignable_partitions (list): partitions to consider, best candidates first

        Returns:
            bool: True if at least one partition was reassigned
        """
        reassignment_performed = False

        # repeat reassignment until no partition can be moved to improve the balance
        while True:
            modified = False
            # reassign all reassignable partitions until the full list is processed or a balance is achieved
            # (starting from the partition with least potential consumers and if needed)
            for partition in reassignable_partitions:
                if self._is_balanced():
                    break
                # the partition must have at least two potential consumers
                if len(self.partition_to_all_potential_consumers[partition]) <= 1:
                    log.error("Expected more than one potential consumer for partition {}".format(partition))
                # the partition must have a current consumer
                consumer = self.current_partition_consumer.get(partition)
                if consumer is None:
                    # NOTE(review): execution continues with consumer=None here, which would raise
                    # a KeyError below -- presumably this state is unreachable; confirm upstream.
                    log.error("Expected partition {} to be assigned to a consumer".format(partition))

                # stickiness first: prefer returning the partition to its previous owner
                # when the current owner is more than one partition ahead of it
                if (
                    partition in self.previous_assignment
                    and len(self.current_assignment[consumer])
                    > len(self.current_assignment[self.previous_assignment[partition].consumer]) + 1
                ):
                    self._reassign_partition_to_consumer(
                        partition, self.previous_assignment[partition].consumer,
                    )
                    reassignment_performed = True
                    modified = True
                    continue

                # check if a better-suited consumer exists for the partition; if so, reassign it
                for other_consumer in self.partition_to_all_potential_consumers[partition]:
                    if len(self.current_assignment[consumer]) > len(self.current_assignment[other_consumer]) + 1:
                        self._reassign_partition(partition)
                        reassignment_performed = True
                        modified = True
                        break

            # fixed point reached: a full pass made no move
            if not modified:
                break
        return reassignment_performed
def _reassign_partition(self, partition):
    """Move the partition to the least-loaded consumer able to own it."""
    target = None
    for candidate, _ in self.sorted_current_subscriptions:
        if partition in self.consumer_to_all_potential_partitions[candidate]:
            target = candidate
            break
    assert target is not None
    self._reassign_partition_to_consumer(partition, target)


def _reassign_partition_to_consumer(self, partition, new_consumer):
    """Move a partition toward new_consumer, honoring the sticky movement bookkeeping."""
    current_owner = self.current_partition_consumer[partition]
    # find the correct partition movement considering the stickiness requirement
    movable_partition = self.partition_movements.get_partition_to_be_moved(partition, current_owner, new_consumer)
    self._move_partition(movable_partition, new_consumer)


def _move_partition(self, partition, new_consumer):
    """Transfer ownership of partition to new_consumer, keeping the sorted order intact."""
    old_consumer = self.current_partition_consumer[partition]
    # pull both affected consumers out of the sorted set before mutating their assignments
    for consumer in (old_consumer, new_consumer):
        self._remove_consumer_from_current_subscriptions_and_maintain_order(consumer)

    self.partition_movements.move_partition(partition, old_consumer, new_consumer)

    self.current_assignment[old_consumer].remove(partition)
    self.current_assignment[new_consumer].append(partition)
    self.current_partition_consumer[partition] = new_consumer

    for consumer in (new_consumer, old_consumer):
        self._add_consumer_to_current_subscriptions_and_maintain_order(consumer)
+ + Arguments: + assignment (dict): {consumer: list of assigned topic partitions} + + Returns: + the balance score of the assignment + """ + score = 0 + consumer_to_assignment = {} + for consumer_id, partitions in six.iteritems(assignment): + consumer_to_assignment[consumer_id] = len(partitions) + + consumers_to_explore = set(consumer_to_assignment.keys()) + for consumer_id in consumer_to_assignment.keys(): + if consumer_id in consumers_to_explore: + consumers_to_explore.remove(consumer_id) + for other_consumer_id in consumers_to_explore: + score += abs(consumer_to_assignment[consumer_id] - consumer_to_assignment[other_consumer_id]) + return score + + +class StickyPartitionAssignor(AbstractPartitionAssignor): + """ + https://cwiki.apache.org/confluence/display/KAFKA/KIP-54+-+Sticky+Partition+Assignment+Strategy + + The sticky assignor serves two purposes. First, it guarantees an assignment that is as balanced as possible, meaning either: + - the numbers of topic partitions assigned to consumers differ by at most one; or + - each consumer that has 2+ fewer topic partitions than some other consumer cannot get any of those topic partitions transferred to it. + + Second, it preserved as many existing assignment as possible when a reassignment occurs. + This helps in saving some of the overhead processing when topic partitions move from one consumer to another. + + Starting fresh it would work by distributing the partitions over consumers as evenly as possible. + Even though this may sound similar to how round robin assignor works, the second example below shows that it is not. + During a reassignment it would perform the reassignment in such a way that in the new assignment + - topic partitions are still distributed as evenly as possible, and + - topic partitions stay with their previously assigned consumers as much as possible. + + The first goal above takes precedence over the second one. + + Example 1. 
class StickyPartitionAssignor(AbstractPartitionAssignor):
    """
    https://cwiki.apache.org/confluence/display/KAFKA/KIP-54+-+Sticky+Partition+Assignment+Strategy

    The sticky assignor serves two purposes. First, it guarantees an assignment that is as balanced as possible, meaning either:
    - the numbers of topic partitions assigned to consumers differ by at most one; or
    - each consumer that has 2+ fewer topic partitions than some other consumer cannot get any of those topic partitions transferred to it.

    Second, it preserves as many existing assignments as possible when a reassignment occurs.
    This helps in saving some of the overhead processing when topic partitions move from one consumer to another.

    Starting fresh it would work by distributing the partitions over consumers as evenly as possible.
    Even though this may sound similar to how round robin assignor works, the second example below shows that it is not.
    During a reassignment it would perform the reassignment in such a way that in the new assignment
    - topic partitions are still distributed as evenly as possible, and
    - topic partitions stay with their previously assigned consumers as much as possible.

    The first goal above takes precedence over the second one.

    Example 1.
    Suppose there are three consumers C0, C1, C2,
    four topics t0, t1, t2, t3, and each topic has 2 partitions,
    resulting in partitions t0p0, t0p1, t1p0, t1p1, t2p0, t2p1, t3p0, t3p1.
    Each consumer is subscribed to all four topics.

    The assignment with both sticky and round robin assignors will be:
    - C0: [t0p0, t1p1, t3p0]
    - C1: [t0p1, t2p0, t3p1]
    - C2: [t1p0, t2p1]

    Now, let's assume C1 is removed and a reassignment is about to happen. The round robin assignor would produce:
    - C0: [t0p0, t1p0, t2p0, t3p0]
    - C2: [t0p1, t1p1, t2p1, t3p1]

    while the sticky assignor would result in:
    - C0 [t0p0, t1p1, t3p0, t2p0]
    - C2 [t1p0, t2p1, t0p1, t3p1]
    preserving all the previous assignments (unlike the round robin assignor).


    Example 2.
    There are three consumers C0, C1, C2,
    and three topics t0, t1, t2, with 1, 2, and 3 partitions respectively.
    Therefore, the partitions are t0p0, t1p0, t1p1, t2p0, t2p1, t2p2.
    C0 is subscribed to t0;
    C1 is subscribed to t0, t1;
    and C2 is subscribed to t0, t1, t2.

    The round robin assignor would come up with the following assignment:
    - C0 [t0p0]
    - C1 [t1p0]
    - C2 [t1p1, t2p0, t2p1, t2p2]

    which is not as balanced as the assignment suggested by sticky assignor:
    - C0 [t0p0]
    - C1 [t1p0, t1p1]
    - C2 [t2p0, t2p1, t2p2]

    Now, if consumer C0 is removed, these two assignors would produce the following assignments.
    Round Robin (preserves 3 partition assignments):
    - C1 [t0p0, t1p1]
    - C2 [t1p0, t2p0, t2p1, t2p2]

    Sticky (preserves 5 partition assignments):
    - C1 [t1p0, t1p1, t0p0]
    - C2 [t2p0, t2p1, t2p2]
    """

    # generation id reported when a member carries no prior sticky user data
    DEFAULT_GENERATION_ID = -1

    name = "sticky"
    version = 0

    # class-level state updated via on_assignment / on_generation_assignment callbacks
    member_assignment = None
    generation = DEFAULT_GENERATION_ID

    _latest_partition_movements = None

    @classmethod
    def assign(cls, cluster, members):
        """Performs group assignment given cluster metadata and member subscriptions

        Arguments:
            cluster (ClusterMetadata): cluster metadata
            members (dict of {member_id: MemberMetadata}): decoded metadata for each member in the group.

        Returns:
            dict: {member_id: MemberAssignment}
        """
        members_metadata = {}
        for consumer, member_metadata in six.iteritems(members):
            members_metadata[consumer] = cls.parse_member_metadata(member_metadata)

        executor = StickyAssignmentExecutor(cluster, members_metadata)
        executor.perform_initial_assignment()
        executor.balance()

        # remember the movements so tests/debugging can inspect stickiness
        cls._latest_partition_movements = executor.partition_movements

        assignment = {}
        for member_id in members:
            assignment[member_id] = ConsumerProtocolMemberAssignment(
                cls.version, sorted(executor.get_final_assignment(member_id)), b''
            )
        return assignment
@classmethod
def parse_member_metadata(cls, metadata):
    """
    Parses member metadata into a python object.
    This implementation only serializes and deserializes the StickyAssignorMemberMetadataV1 user data,
    since no StickyAssignor written in Python was deployed ever in the wild with version V0, meaning that
    there is no need to support backward compatibility with V0.

    Arguments:
        metadata (MemberMetadata): decoded metadata for a member of the group.

    Returns:
        parsed metadata (StickyAssignorMemberMetadataV1)
    """
    user_data = metadata.user_data
    if not user_data:
        return StickyAssignorMemberMetadataV1(
            partitions=[], generation=cls.DEFAULT_GENERATION_ID, subscription=metadata.subscription
        )

    try:
        decoded_user_data = StickyAssignorUserDataV1.decode(user_data)
    except Exception as e:
        # ignore the consumer's previous assignment if it cannot be parsed.
        # fix: pass the exception through a %s placeholder -- the previous call
        # passed `e` as a format argument to a message with no format spec
        # (the pylint logging-too-many-args disable acknowledged the defect).
        log.error("Could not parse member data: %s", e)
        return StickyAssignorMemberMetadataV1(
            partitions=[], generation=cls.DEFAULT_GENERATION_ID, subscription=metadata.subscription
        )

    member_partitions = []
    for topic, partitions in decoded_user_data.previous_assignment:  # pylint: disable=no-member
        member_partitions.extend([TopicPartition(topic, partition) for partition in partitions])
    return StickyAssignorMemberMetadataV1(
        # pylint: disable=no-member
        partitions=member_partitions, generation=decoded_user_data.generation, subscription=metadata.subscription
    )


@classmethod
def metadata(cls, topics):
    """Build this member's JoinGroup metadata from the last known assignment and generation."""
    return cls._metadata(topics, cls.member_assignment, cls.generation)


@classmethod
def _metadata(cls, topics, member_assignment_partitions, generation=-1):
    """Encode sticky user data (previous assignment + generation) into member metadata.

    Arguments:
        topics (iterable): topics this member subscribes to
        member_assignment_partitions (list of TopicPartition or None): last known assignment
        generation (int): generation id to embed in the user data
    """
    if member_assignment_partitions is None:
        log.debug("No member assignment available")
        user_data = b''
    else:
        # fix: log the generation actually being encoded (the parameter), not cls.generation,
        # which can differ when _metadata is invoked directly
        log.debug("Member assignment is available, generating the metadata: generation %s", generation)
        partitions_by_topic = defaultdict(list)
        for topic_partition in member_assignment_partitions:
            partitions_by_topic[topic_partition.topic].append(topic_partition.partition)
        data = StickyAssignorUserDataV1(partitions_by_topic.items(), generation)
        user_data = data.encode()
    return ConsumerProtocolMemberMetadata(cls.version, list(topics), user_data)
+ + Arguments: + assignment: MemberAssignment + """ + log.debug("On assignment: assignment={}".format(assignment)) + cls.member_assignment = assignment.partitions() + + @classmethod + def on_generation_assignment(cls, generation): + """Callback that runs on each assignment. Updates assignor's generation id. + + Arguments: + generation: generation id + """ + log.debug("On generation assignment: generation={}".format(generation)) + cls.generation = generation diff --git a/testbed/dpkp__kafka-python/kafka/coordinator/base.py b/testbed/dpkp__kafka-python/kafka/coordinator/base.py new file mode 100644 index 0000000000000000000000000000000000000000..5e41309df1dfbe00b81520a4eb873f47ad5e9059 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/coordinator/base.py @@ -0,0 +1,1023 @@ +from __future__ import absolute_import, division + +import abc +import copy +import logging +import threading +import time +import weakref + +from kafka.vendor import six + +from kafka.coordinator.heartbeat import Heartbeat +from kafka import errors as Errors +from kafka.future import Future +from kafka.metrics import AnonMeasurable +from kafka.metrics.stats import Avg, Count, Max, Rate +from kafka.protocol.commit import GroupCoordinatorRequest, OffsetCommitRequest +from kafka.protocol.group import (HeartbeatRequest, JoinGroupRequest, + LeaveGroupRequest, SyncGroupRequest) + +log = logging.getLogger('kafka.coordinator') + + +class MemberState(object): + UNJOINED = '' # the client is not part of a group + REBALANCING = '' # the client has begun rebalancing + STABLE = '' # the client has joined and is sending heartbeats + + +class Generation(object): + def __init__(self, generation_id, member_id, protocol): + self.generation_id = generation_id + self.member_id = member_id + self.protocol = protocol + +Generation.NO_GENERATION = Generation( + OffsetCommitRequest[2].DEFAULT_GENERATION_ID, + JoinGroupRequest[0].UNKNOWN_MEMBER_ID, + None) + + +class UnjoinedGroupException(Errors.KafkaError): + 
class BaseCoordinator(object):
    """
    BaseCoordinator implements group management for a single group member
    by interacting with a designated Kafka broker (the coordinator). Group
    semantics are provided by extending this class. See ConsumerCoordinator
    for example usage.

    From a high level, Kafka's group management protocol consists of the
    following sequence of actions:

    1. Group Registration: Group members register with the coordinator providing
       their own metadata (such as the set of topics they are interested in).

    2. Group/Leader Selection: The coordinator selects the members of the group
       and chooses one member as the leader.

    3. State Assignment: The leader collects the metadata from all the members
       of the group and assigns state.

    4. Group Stabilization: Each member receives the state assigned by the
       leader and begins processing.

    To leverage this protocol, an implementation must define the format of
    metadata provided by each member for group registration in
    :meth:`.group_protocols` and the format of the state assignment provided by
    the leader in :meth:`._perform_assignment` and which becomes available to
    members in :meth:`._on_join_complete`.

    Note on locking: this class shares state between the caller and a background
    thread which is used for sending heartbeats after the client has joined the
    group. All mutable state as well as state transitions are protected with the
    class's monitor. Generally this means acquiring the lock before reading or
    writing the state of the group (e.g. generation, member_id) and holding the
    lock when sending a request that affects the state of the group
    (e.g. JoinGroup, LeaveGroup).
    """

    # NOTE(review): session_timeout_ms defaults to 10000 here, but the __init__
    # docstring advertises 30000 -- confirm which value is intended.
    DEFAULT_CONFIG = {
        'group_id': 'kafka-python-default-group',
        'session_timeout_ms': 10000,
        'heartbeat_interval_ms': 3000,
        'max_poll_interval_ms': 300000,
        'retry_backoff_ms': 100,
        'api_version': (0, 10, 1),
        'metric_group_prefix': '',
    }
+ """ + self.config = copy.copy(self.DEFAULT_CONFIG) + for key in self.config: + if key in configs: + self.config[key] = configs[key] + + if self.config['api_version'] < (0, 10, 1): + if self.config['max_poll_interval_ms'] != self.config['session_timeout_ms']: + raise Errors.KafkaConfigurationError("Broker version %s does not support " + "different values for max_poll_interval_ms " + "and session_timeout_ms") + + self._client = client + self.group_id = self.config['group_id'] + self.heartbeat = Heartbeat(**self.config) + self._heartbeat_thread = None + self._lock = threading.Condition() + self.rejoin_needed = True + self.rejoining = False # renamed / complement of java needsJoinPrepare + self.state = MemberState.UNJOINED + self.join_future = None + self.coordinator_id = None + self._find_coordinator_future = None + self._generation = Generation.NO_GENERATION + self.sensors = GroupCoordinatorMetrics(self.heartbeat, metrics, + self.config['metric_group_prefix']) + + @abc.abstractmethod + def protocol_type(self): + """ + Unique identifier for the class of supported protocols + (e.g. "consumer" or "connect"). + + Returns: + str: protocol type name + """ + pass + + @abc.abstractmethod + def group_protocols(self): + """Return the list of supported group protocols and metadata. + + This list is submitted by each group member via a JoinGroupRequest. + The order of the protocols in the list indicates the preference of the + protocol (the first entry is the most preferred). The coordinator takes + this preference into account when selecting the generation protocol + (generally more preferred protocols will be selected as long as all + members support them and there is no disagreement on the preference). + + Note: metadata must be type bytes or support an encode() method + + Returns: + list: [(protocol, metadata), ...] + """ + pass + + @abc.abstractmethod + def _on_join_prepare(self, generation, member_id): + """Invoked prior to each group join or rejoin. 
+ + This is typically used to perform any cleanup from the previous + generation (such as committing offsets for the consumer) + + Arguments: + generation (int): The previous generation or -1 if there was none + member_id (str): The identifier of this member in the previous group + or '' if there was none + """ + pass + + @abc.abstractmethod + def _perform_assignment(self, leader_id, protocol, members): + """Perform assignment for the group. + + This is used by the leader to push state to all the members of the group + (e.g. to push partition assignments in the case of the new consumer) + + Arguments: + leader_id (str): The id of the leader (which is this member) + protocol (str): the chosen group protocol (assignment strategy) + members (list): [(member_id, metadata_bytes)] from + JoinGroupResponse. metadata_bytes are associated with the chosen + group protocol, and the Coordinator subclass is responsible for + decoding metadata_bytes based on that protocol. + + Returns: + dict: {member_id: assignment}; assignment must either be bytes + or have an encode() method to convert to bytes + """ + pass + + @abc.abstractmethod + def _on_join_complete(self, generation, member_id, protocol, + member_assignment_bytes): + """Invoked when a group member has successfully joined a group. + + Arguments: + generation (int): the generation that was joined + member_id (str): the identifier for the local member in the group + protocol (str): the protocol selected by the coordinator + member_assignment_bytes (bytes): the protocol-encoded assignment + propagated from the group leader. The Coordinator instance is + responsible for decoding based on the chosen protocol. 
+ """ + pass + + def coordinator_unknown(self): + """Check if we know who the coordinator is and have an active connection + + Side-effect: reset coordinator_id to None if connection failed + + Returns: + bool: True if the coordinator is unknown + """ + return self.coordinator() is None + + def coordinator(self): + """Get the current coordinator + + Returns: the current coordinator id or None if it is unknown + """ + if self.coordinator_id is None: + return None + elif self._client.is_disconnected(self.coordinator_id): + self.coordinator_dead('Node Disconnected') + return None + else: + return self.coordinator_id + + def ensure_coordinator_ready(self): + """Block until the coordinator for this group is known + (and we have an active connection -- java client uses unsent queue). + """ + with self._client._lock, self._lock: + while self.coordinator_unknown(): + + # Prior to 0.8.2 there was no group coordinator + # so we will just pick a node at random and treat + # it as the "coordinator" + if self.config['api_version'] < (0, 8, 2): + self.coordinator_id = self._client.least_loaded_node() + if self.coordinator_id is not None: + self._client.maybe_connect(self.coordinator_id) + continue + + future = self.lookup_coordinator() + self._client.poll(future=future) + + if future.failed(): + if future.retriable(): + if getattr(future.exception, 'invalid_metadata', False): + log.debug('Requesting metadata for group coordinator request: %s', future.exception) + metadata_update = self._client.cluster.request_update() + self._client.poll(future=metadata_update) + else: + time.sleep(self.config['retry_backoff_ms'] / 1000) + else: + raise future.exception # pylint: disable-msg=raising-bad-type + + def _reset_find_coordinator_future(self, result): + self._find_coordinator_future = None + + def lookup_coordinator(self): + with self._lock: + if self._find_coordinator_future is not None: + return self._find_coordinator_future + + # If there is an error sending the group coordinator 
request + # then _reset_find_coordinator_future will immediately fire and + # set _find_coordinator_future = None + # To avoid returning None, we capture the future in a local variable + future = self._send_group_coordinator_request() + self._find_coordinator_future = future + self._find_coordinator_future.add_both(self._reset_find_coordinator_future) + return future + + def need_rejoin(self): + """Check whether the group should be rejoined (e.g. if metadata changes) + + Returns: + bool: True if it should, False otherwise + """ + return self.rejoin_needed + + def poll_heartbeat(self): + """ + Check the status of the heartbeat thread (if it is active) and indicate + the liveness of the client. This must be called periodically after + joining with :meth:`.ensure_active_group` to ensure that the member stays + in the group. If an interval of time longer than the provided rebalance + timeout (max_poll_interval_ms) expires without calling this method, then + the client will proactively leave the group. + + Raises: RuntimeError for unexpected errors raised from the heartbeat thread + """ + with self._lock: + if self._heartbeat_thread is not None: + if self._heartbeat_thread.failed: + # set the heartbeat thread to None and raise an exception. + # If the user catches it, the next call to ensure_active_group() + # will spawn a new heartbeat thread. 
                    # (continuation of poll_heartbeat from the previous chunk line)
                    cause = self._heartbeat_thread.failed
                    self._heartbeat_thread = None
                    raise cause # pylint: disable-msg=raising-bad-type

                # Awake the heartbeat thread if needed
                if self.heartbeat.should_heartbeat():
                    self._lock.notify()
            self.heartbeat.poll()

    def time_to_next_heartbeat(self):
        """Returns seconds (float) remaining before next heartbeat should be sent

        Note: Returns infinite if group is not joined
        """
        with self._lock:
            # if we have not joined the group, we don't need to send heartbeats
            if self.state is MemberState.UNJOINED:
                return float('inf')
            return self.heartbeat.time_to_next_heartbeat()

    def _handle_join_success(self, member_assignment_bytes):
        # Success callback for the join future: mark the member STABLE and
        # re-enable the heartbeat thread that was fenced off during the join.
        with self._lock:
            log.info("Successfully joined group %s with generation %s",
                     self.group_id, self._generation.generation_id)
            self.state = MemberState.STABLE
            self.rejoin_needed = False
            if self._heartbeat_thread:
                self._heartbeat_thread.enable()

    def _handle_join_failure(self, _):
        # Errback for the join future: fall back to UNJOINED so the loop in
        # ensure_active_group retries the join.
        with self._lock:
            self.state = MemberState.UNJOINED

    def ensure_active_group(self):
        """Ensure that the group is active (i.e. joined and synced)"""
        with self._client._lock, self._lock:
            if self._heartbeat_thread is None:
                self._start_heartbeat_thread()

            while self.need_rejoin() or self._rejoin_incomplete():
                self.ensure_coordinator_ready()

                # call on_join_prepare if needed. We set a flag
                # to make sure that we do not call it a second
                # time if the client is woken up before a pending
                # rebalance completes. This must be called on each
                # iteration of the loop because an event requiring
                # a rebalance (such as a metadata refresh which
                # changes the matched subscription set) can occur
                # while another rebalance is still in progress.
                if not self.rejoining:
                    self._on_join_prepare(self._generation.generation_id,
                                          self._generation.member_id)
                    self.rejoining = True

                # ensure that there are no pending requests to the coordinator.
                # This is important in particular to avoid resending a pending
                # JoinGroup request.
                while not self.coordinator_unknown():
                    if not self._client.in_flight_request_count(self.coordinator_id):
                        break
                    self._client.poll()
                else:
                    # coordinator became unknown while draining: restart loop
                    continue

                # we store the join future in case we are woken up by the user
                # after beginning the rebalance in the call to poll below.
                # This ensures that we do not mistakenly attempt to rejoin
                # before the pending rebalance has completed.
                if self.join_future is None:
                    # Fence off the heartbeat thread explicitly so that it cannot
                    # interfere with the join group. Note that this must come after
                    # the call to _on_join_prepare since we must be able to continue
                    # sending heartbeats if that callback takes some time.
                    self._heartbeat_thread.disable()

                    self.state = MemberState.REBALANCING
                    future = self._send_join_group_request()

                    self.join_future = future # this should happen before adding callbacks

                    # handle join completion in the callback so that the
                    # callback will be invoked even if the consumer is woken up
                    # before finishing the rebalance
                    future.add_callback(self._handle_join_success)

                    # we handle failures below after the request finishes.
                    # If the join completes after having been woken up, the
                    # exception is ignored and we will rejoin
                    future.add_errback(self._handle_join_failure)

                else:
                    future = self.join_future

                self._client.poll(future=future)

                if future.succeeded():
                    self._on_join_complete(self._generation.generation_id,
                                           self._generation.member_id,
                                           self._generation.protocol,
                                           future.value)
                    self.join_future = None
                    self.rejoining = False

                else:
                    self.join_future = None
                    exception = future.exception
                    # these errors are recoverable by an immediate re-join
                    if isinstance(exception, (Errors.UnknownMemberIdError,
                                              Errors.RebalanceInProgressError,
                                              Errors.IllegalGenerationError)):
                        continue
                    elif not future.retriable():
                        raise exception # pylint: disable-msg=raising-bad-type
                    time.sleep(self.config['retry_backoff_ms'] / 1000)

    def _rejoin_incomplete(self):
        # True while a JoinGroup round-trip is still outstanding.
        return self.join_future is not None

    def _send_join_group_request(self):
        """Join the group and return the assignment for the next generation.

        This function handles both JoinGroup and SyncGroup, delegating to
        :meth:`._perform_assignment` if elected leader by the coordinator.

        Returns:
            Future: resolves to the encoded-bytes assignment returned from the
                group leader
        """
        if self.coordinator_unknown():
            e = Errors.GroupCoordinatorNotAvailableError(self.coordinator_id)
            return Future().failure(e)

        elif not self._client.ready(self.coordinator_id, metadata_priority=False):
            e = Errors.NodeNotReadyError(self.coordinator_id)
            return Future().failure(e)

        # send a join group request to the coordinator
        log.info("(Re-)joining group %s", self.group_id)
        member_metadata = [
            (protocol, metadata if isinstance(metadata, bytes) else metadata.encode())
            for protocol, metadata in self.group_protocols()
        ]
        # JoinGroupRequest version is selected by broker api_version
        if self.config['api_version'] < (0, 9):
            raise Errors.KafkaError('JoinGroupRequest api requires 0.9+ brokers')
        elif (0, 9) <= self.config['api_version'] < (0, 10, 1):
            request = JoinGroupRequest[0](
                self.group_id,
                self.config['session_timeout_ms'],
                self._generation.member_id,
                self.protocol_type(),
                member_metadata)
        elif (0, 10, 1) <= self.config['api_version'] < (0, 11, 0):
            request = JoinGroupRequest[1](
                self.group_id,
                self.config['session_timeout_ms'],
                self.config['max_poll_interval_ms'],
                self._generation.member_id,
                self.protocol_type(),
                member_metadata)
        else:
            request = JoinGroupRequest[2](
                self.group_id,
                self.config['session_timeout_ms'],
                self.config['max_poll_interval_ms'],
                self._generation.member_id,
                self.protocol_type(),
                member_metadata)

        # create the request for the coordinator
        log.debug("Sending JoinGroup (%s) to coordinator %s", request, self.coordinator_id)
        future = Future()
        _f = self._client.send(self.coordinator_id, request)
        _f.add_callback(self._handle_join_group_response, future, time.time())
        _f.add_errback(self._failed_request, self.coordinator_id,
                       request, future)
        return future

    def _failed_request(self, node_id, request, future, error):
        # Marking coordinator dead
        # unless the error is caused by internal client pipelining
        if not
            isinstance(error, (Errors.NodeNotReadyError,
                               Errors.TooManyInFlightRequests)):
            # send failed for a reason other than client-side pipelining:
            # assume the coordinator itself is unhealthy
            log.error('Error sending %s to node %s [%s]',
                      request.__class__.__name__, node_id, error)
            self.coordinator_dead(error)
        else:
            log.debug('Error sending %s to node %s [%s]',
                      request.__class__.__name__, node_id, error)
        future.failure(error)

    def _handle_join_group_response(self, future, send_time, response):
        # Dispatch on the broker error code: success proceeds to SyncGroup
        # (leader or follower path); errors either retry, rejoin, or fail.
        error_type = Errors.for_code(response.error_code)
        if error_type is Errors.NoError:
            log.debug("Received successful JoinGroup response for group %s: %s",
                      self.group_id, response)
            self.sensors.join_latency.record((time.time() - send_time) * 1000)
            with self._lock:
                if self.state is not MemberState.REBALANCING:
                    # if the consumer was woken up before a rebalance completes,
                    # we may have already left the group. In this case, we do
                    # not want to continue with the sync group.
                    future.failure(UnjoinedGroupException())
                else:
                    self._generation = Generation(response.generation_id,
                                                  response.member_id,
                                                  response.group_protocol)

                    if response.leader_id == response.member_id:
                        log.info("Elected group leader -- performing partition"
                                 " assignments using %s", self._generation.protocol)
                        self._on_join_leader(response).chain(future)
                    else:
                        self._on_join_follower().chain(future)

        elif error_type is Errors.GroupLoadInProgressError:
            log.debug("Attempt to join group %s rejected since coordinator %s"
                      " is loading the group.", self.group_id, self.coordinator_id)
            # backoff and retry
            future.failure(error_type(response))
        elif error_type is Errors.UnknownMemberIdError:
            # reset the member id and retry immediately
            error = error_type(self._generation.member_id)
            self.reset_generation()
            log.debug("Attempt to join group %s failed due to unknown member id",
                      self.group_id)
            future.failure(error)
        elif error_type in (Errors.GroupCoordinatorNotAvailableError,
                            Errors.NotCoordinatorForGroupError):
            # re-discover the coordinator and retry with backoff
            self.coordinator_dead(error_type())
            log.debug("Attempt to join group %s failed due to obsolete "
                      "coordinator information: %s", self.group_id,
                      error_type.__name__)
            future.failure(error_type())
        elif error_type in (Errors.InconsistentGroupProtocolError,
                            Errors.InvalidSessionTimeoutError,
                            Errors.InvalidGroupIdError):
            # log the error and re-throw the exception
            error = error_type(response)
            log.error("Attempt to join group %s failed due to fatal error: %s",
                      self.group_id, error)
            future.failure(error)
        elif error_type is Errors.GroupAuthorizationFailedError:
            future.failure(error_type(self.group_id))
        else:
            # unexpected error, throw the exception
            error = error_type()
            log.error("Unexpected error in join group response: %s", error)
            future.failure(error)

    def _on_join_follower(self):
        # send follower's sync group with an empty assignment
        version = 0 if self.config['api_version'] < (0, 11, 0) else 1
        request = SyncGroupRequest[version](
            self.group_id,
            self._generation.generation_id,
            self._generation.member_id,
            {})
        log.debug("Sending follower SyncGroup for group %s to coordinator %s: %s",
                  self.group_id, self.coordinator_id, request)
        return self._send_sync_group_request(request)

    def _on_join_leader(self, response):
        """
        Perform leader synchronization and send back the assignment
        for the group via SyncGroupRequest

        Arguments:
            response (JoinResponse): broker response to parse

        Returns:
            Future: resolves to member assignment encoded-bytes
        """
        try:
            group_assignment = self._perform_assignment(response.leader_id,
                                                        response.group_protocol,
                                                        response.members)
        except Exception as e:
            return Future().failure(e)

        version = 0 if self.config['api_version'] < (0, 11, 0) else 1
        request = SyncGroupRequest[version](
            self.group_id,
            self._generation.generation_id,
            self._generation.member_id,
            [(member_id,
              assignment if isinstance(assignment, bytes) else assignment.encode())
             for member_id,
             assignment in six.iteritems(group_assignment)])

        log.debug("Sending leader SyncGroup for group %s to coordinator %s: %s",
                  self.group_id, self.coordinator_id, request)
        return self._send_sync_group_request(request)

    def _send_sync_group_request(self, request):
        # Fails fast if the coordinator is unknown; otherwise sends the
        # SyncGroup and returns a Future resolving to assignment bytes.
        if self.coordinator_unknown():
            e = Errors.GroupCoordinatorNotAvailableError(self.coordinator_id)
            return Future().failure(e)

        # We assume that coordinator is ready if we're sending SyncGroup
        # as it typically follows a successful JoinGroup
        # Also note that if client.ready() enforces a metadata priority policy,
        # we can get into an infinite loop if the leader assignment process
        # itself requests a metadata update

        future = Future()
        _f = self._client.send(self.coordinator_id, request)
        _f.add_callback(self._handle_sync_group_response, future, time.time())
        _f.add_errback(self._failed_request, self.coordinator_id,
                       request, future)
        return future

    def _handle_sync_group_response(self, future, send_time, response):
        # Success resolves the future with raw member-assignment bytes;
        # every error path requests a rejoin before failing the future.
        error_type = Errors.for_code(response.error_code)
        if error_type is Errors.NoError:
            self.sensors.sync_latency.record((time.time() - send_time) * 1000)
            future.success(response.member_assignment)
            return

        # Always rejoin on error
        self.request_rejoin()
        if error_type is Errors.GroupAuthorizationFailedError:
            future.failure(error_type(self.group_id))
        elif error_type is Errors.RebalanceInProgressError:
            log.debug("SyncGroup for group %s failed due to coordinator"
                      " rebalance", self.group_id)
            future.failure(error_type(self.group_id))
        elif error_type in (Errors.UnknownMemberIdError,
                            Errors.IllegalGenerationError):
            error = error_type()
            log.debug("SyncGroup for group %s failed due to %s", self.group_id, error)
            self.reset_generation()
            future.failure(error)
        elif error_type in (Errors.GroupCoordinatorNotAvailableError,
                            Errors.NotCoordinatorForGroupError):
            error = error_type()
            log.debug("SyncGroup for group %s failed due to %s", self.group_id, error)
            self.coordinator_dead(error)
            future.failure(error)
        else:
            error = error_type()
            log.error("Unexpected error from SyncGroup: %s", error)
            future.failure(error)

    def _send_group_coordinator_request(self):
        """Discover the current coordinator for the group.

        Returns:
            Future: resolves to the node id of the coordinator
        """
        node_id = self._client.least_loaded_node()
        if node_id is None:
            return Future().failure(Errors.NoBrokersAvailable())

        elif not self._client.ready(node_id, metadata_priority=False):
            e = Errors.NodeNotReadyError(node_id)
            return Future().failure(e)

        log.debug("Sending group coordinator request for group %s to broker %s",
                  self.group_id, node_id)
        request = GroupCoordinatorRequest[0](self.group_id)
        future = Future()
        _f = self._client.send(node_id, request)
        _f.add_callback(self._handle_group_coordinator_response, future)
        _f.add_errback(self._failed_request, node_id, request, future)
        return future

    def _handle_group_coordinator_response(self, future, response):
        # Parse the FindCoordinator response; on success cache coordinator_id,
        # initiate a connection, and reset heartbeat timeouts.
        log.debug("Received group coordinator response %s", response)

        error_type = Errors.for_code(response.error_code)
        if error_type is Errors.NoError:
            with self._lock:
                coordinator_id = self._client.cluster.add_group_coordinator(self.group_id, response)
                if not coordinator_id:
                    # This could happen if coordinator metadata is different
                    # than broker metadata
                    future.failure(Errors.IllegalStateError())
                    return

                self.coordinator_id = coordinator_id
                log.info("Discovered coordinator %s for group %s",
                         self.coordinator_id, self.group_id)
                self._client.maybe_connect(self.coordinator_id)
                self.heartbeat.reset_timeouts()
                future.success(self.coordinator_id)

        elif error_type is Errors.GroupCoordinatorNotAvailableError:
            log.debug("Group Coordinator Not Available; retry")
            future.failure(error_type())
        elif error_type is Errors.GroupAuthorizationFailedError:
            error = error_type(self.group_id)
            log.error("Group Coordinator Request failed: %s", error)
            future.failure(error)
        else:
            error = error_type()
            log.error("Group coordinator lookup for group %s failed: %s",
                      self.group_id, error)
            future.failure(error)

    def coordinator_dead(self, error):
        """Mark the current coordinator as dead."""
        if self.coordinator_id is not None:
            log.warning("Marking the coordinator dead (node %s) for group %s: %s.",
                        self.coordinator_id, self.group_id, error)
            self.coordinator_id = None

    def generation(self):
        """Get the current generation state if the group is stable.

        Returns: the current generation or None if the group is unjoined/rebalancing
        """
        with self._lock:
            if self.state is not MemberState.STABLE:
                return None
            return self._generation

    def reset_generation(self):
        """Reset the generation and memberId because we have fallen out of the group."""
        with self._lock:
            self._generation = Generation.NO_GENERATION
            self.rejoin_needed = True
            self.state = MemberState.UNJOINED

    def request_rejoin(self):
        # Flag the next ensure_active_group() call to rejoin the group.
        self.rejoin_needed = True

    def _start_heartbeat_thread(self):
        # weakref.proxy avoids a reference cycle that would keep this
        # coordinator alive for the lifetime of the daemon thread.
        if self._heartbeat_thread is None:
            log.info('Starting new heartbeat thread')
            self._heartbeat_thread = HeartbeatThread(weakref.proxy(self))
            self._heartbeat_thread.daemon = True
            self._heartbeat_thread.start()

    def _close_heartbeat_thread(self):
        if self._heartbeat_thread is not None:
            log.info('Stopping heartbeat thread')
            try:
                self._heartbeat_thread.close()
            except ReferenceError:
                # the weakly-referenced coordinator may already be gone
                pass
            self._heartbeat_thread = None

    def __del__(self):
        self._close_heartbeat_thread()

    def close(self):
        """Close the coordinator, leave the current group,
        and reset local generation / member_id"""
        self._close_heartbeat_thread()
        self.maybe_leave_group()

    def maybe_leave_group(self):
        """Leave the current group and reset local generation/memberId."""
        with self._client._lock, self._lock:
            if (not self.coordinator_unknown()
                and self.state is not MemberState.UNJOINED
                and
self._generation is not Generation.NO_GENERATION): + + # this is a minimal effort attempt to leave the group. we do not + # attempt any resending if the request fails or times out. + log.info('Leaving consumer group (%s).', self.group_id) + version = 0 if self.config['api_version'] < (0, 11, 0) else 1 + request = LeaveGroupRequest[version](self.group_id, self._generation.member_id) + future = self._client.send(self.coordinator_id, request) + future.add_callback(self._handle_leave_group_response) + future.add_errback(log.error, "LeaveGroup request failed: %s") + self._client.poll(future=future) + + self.reset_generation() + + def _handle_leave_group_response(self, response): + error_type = Errors.for_code(response.error_code) + if error_type is Errors.NoError: + log.debug("LeaveGroup request for group %s returned successfully", + self.group_id) + else: + log.error("LeaveGroup request for group %s failed with error: %s", + self.group_id, error_type()) + + def _send_heartbeat_request(self): + """Send a heartbeat request""" + if self.coordinator_unknown(): + e = Errors.GroupCoordinatorNotAvailableError(self.coordinator_id) + return Future().failure(e) + + elif not self._client.ready(self.coordinator_id, metadata_priority=False): + e = Errors.NodeNotReadyError(self.coordinator_id) + return Future().failure(e) + + version = 0 if self.config['api_version'] < (0, 11, 0) else 1 + request = HeartbeatRequest[version](self.group_id, + self._generation.generation_id, + self._generation.member_id) + log.debug("Heartbeat: %s[%s] %s", request.group, request.generation_id, request.member_id) # pylint: disable-msg=no-member + future = Future() + _f = self._client.send(self.coordinator_id, request) + _f.add_callback(self._handle_heartbeat_response, future, time.time()) + _f.add_errback(self._failed_request, self.coordinator_id, + request, future) + return future + + def _handle_heartbeat_response(self, future, send_time, response): + self.sensors.heartbeat_latency.record((time.time() 
- send_time) * 1000) + error_type = Errors.for_code(response.error_code) + if error_type is Errors.NoError: + log.debug("Received successful heartbeat response for group %s", + self.group_id) + future.success(None) + elif error_type in (Errors.GroupCoordinatorNotAvailableError, + Errors.NotCoordinatorForGroupError): + log.warning("Heartbeat failed for group %s: coordinator (node %s)" + " is either not started or not valid", self.group_id, + self.coordinator()) + self.coordinator_dead(error_type()) + future.failure(error_type()) + elif error_type is Errors.RebalanceInProgressError: + log.warning("Heartbeat failed for group %s because it is" + " rebalancing", self.group_id) + self.request_rejoin() + future.failure(error_type()) + elif error_type is Errors.IllegalGenerationError: + log.warning("Heartbeat failed for group %s: generation id is not " + " current.", self.group_id) + self.reset_generation() + future.failure(error_type()) + elif error_type is Errors.UnknownMemberIdError: + log.warning("Heartbeat: local member_id was not recognized;" + " this consumer needs to re-join") + self.reset_generation() + future.failure(error_type) + elif error_type is Errors.GroupAuthorizationFailedError: + error = error_type(self.group_id) + log.error("Heartbeat failed: authorization error: %s", error) + future.failure(error) + else: + error = error_type() + log.error("Heartbeat failed: Unhandled error: %s", error) + future.failure(error) + + +class GroupCoordinatorMetrics(object): + def __init__(self, heartbeat, metrics, prefix, tags=None): + self.heartbeat = heartbeat + self.metrics = metrics + self.metric_group_name = prefix + "-coordinator-metrics" + + self.heartbeat_latency = metrics.sensor('heartbeat-latency') + self.heartbeat_latency.add(metrics.metric_name( + 'heartbeat-response-time-max', self.metric_group_name, + 'The max time taken to receive a response to a heartbeat request', + tags), Max()) + self.heartbeat_latency.add(metrics.metric_name( + 'heartbeat-rate', 
            self.metric_group_name,
            'The average number of heartbeats per second',
            tags), Rate(sampled_stat=Count()))

        self.join_latency = metrics.sensor('join-latency')
        self.join_latency.add(metrics.metric_name(
            'join-time-avg', self.metric_group_name,
            'The average time taken for a group rejoin',
            tags), Avg())
        self.join_latency.add(metrics.metric_name(
            'join-time-max', self.metric_group_name,
            'The max time taken for a group rejoin',
            tags), Max())
        self.join_latency.add(metrics.metric_name(
            'join-rate', self.metric_group_name,
            'The number of group joins per second',
            tags), Rate(sampled_stat=Count()))

        self.sync_latency = metrics.sensor('sync-latency')
        self.sync_latency.add(metrics.metric_name(
            'sync-time-avg', self.metric_group_name,
            'The average time taken for a group sync',
            tags), Avg())
        self.sync_latency.add(metrics.metric_name(
            'sync-time-max', self.metric_group_name,
            'The max time taken for a group sync',
            tags), Max())
        self.sync_latency.add(metrics.metric_name(
            'sync-rate', self.metric_group_name,
            'The number of group syncs per second',
            tags), Rate(sampled_stat=Count()))

        metrics.add_metric(metrics.metric_name(
            'last-heartbeat-seconds-ago', self.metric_group_name,
            'The number of seconds since the last controller heartbeat was sent',
            tags), AnonMeasurable(
                lambda _, now: (now / 1000) - self.heartbeat.last_send))


class HeartbeatThread(threading.Thread):
    # Background daemon thread that keeps the member alive in the group by
    # sending periodic heartbeats while the member state is STABLE.
    def __init__(self, coordinator):
        super(HeartbeatThread, self).__init__()
        self.name = coordinator.group_id + '-heartbeat'
        self.coordinator = coordinator
        self.enabled = False  # heartbeats are only sent while enabled
        self.closed = False  # set once by close(); terminates run()
        self.failed = None  # fatal exception, surfaced via poll_heartbeat()

    def enable(self):
        # Called after a successful join: resume heartbeating and wake the
        # thread if it is waiting on the coordinator condition.
        with self.coordinator._lock:
            self.enabled = True
            self.coordinator.heartbeat.reset_timeouts()
            self.coordinator._lock.notify()

    def disable(self):
        self.enabled = False

    def close(self):
        self.closed = True
        with self.coordinator._lock:
            # wake the thread so it can observe self.closed and exit
            self.coordinator._lock.notify()
        if self.is_alive():
            self.join(self.coordinator.config['heartbeat_interval_ms'] / 1000)
            if self.is_alive():
                log.warning("Heartbeat thread did not fully terminate during close")

    def run(self):
        try:
            log.debug('Heartbeat thread started')
            while not self.closed:
                self._run_once()

        except ReferenceError:
            # coordinator was gc'd: we only hold a weakref proxy to it
            log.debug('Heartbeat thread closed due to coordinator gc')

        except RuntimeError as e:
            log.error("Heartbeat thread for group %s failed due to unexpected error: %s",
                      self.coordinator.group_id, e)
            self.failed = e

        finally:
            log.debug('Heartbeat thread closed')

    def _run_once(self):
        with self.coordinator._client._lock, self.coordinator._lock:
            if self.enabled and self.coordinator.state is MemberState.STABLE:
                # TODO: When consumer.wakeup() is implemented, we need to
                # disable here to prevent propagating an exception to this
                # heartbeat thread
                # must get client._lock, or maybe deadlock at heartbeat
                # failure callback in consumer poll
                self.coordinator._client.poll(timeout_ms=0)

        with self.coordinator._lock:
            if not self.enabled:
                log.debug('Heartbeat disabled. Waiting')
                self.coordinator._lock.wait()
                log.debug('Heartbeat re-enabled.')
                return

            if self.coordinator.state is not MemberState.STABLE:
                # the group is not stable (perhaps because we left the
                # group or because the coordinator kicked us out), so
                # disable heartbeats and wait for the main thread to rejoin.
                log.debug('Group state is not stable, disabling heartbeats')
                self.disable()
                return

            if self.coordinator.coordinator_unknown():
                future = self.coordinator.lookup_coordinator()
                if not future.is_done or future.failed():
                    # the immediate future check ensures that we backoff
                    # properly in the case that no brokers are available
                    # to connect to (and the future is automatically failed).
                    self.coordinator._lock.wait(self.coordinator.config['retry_backoff_ms'] / 1000)

            elif self.coordinator.heartbeat.session_timeout_expired():
                # the session timeout has expired without seeing a
                # successful heartbeat, so we should probably make sure
                # the coordinator is still healthy.
                log.warning('Heartbeat session expired, marking coordinator dead')
                self.coordinator.coordinator_dead('Heartbeat session expired')

            elif self.coordinator.heartbeat.poll_timeout_expired():
                # the poll timeout has expired, which means that the
                # foreground thread has stalled in between calls to
                # poll(), so we explicitly leave the group.
                log.warning('Heartbeat poll expired, leaving group')
                self.coordinator.maybe_leave_group()

            elif not self.coordinator.heartbeat.should_heartbeat():
                # poll again after waiting for the retry backoff in case
                # the heartbeat failed or the coordinator disconnected
                log.log(0, 'Not ready to heartbeat, waiting')
                self.coordinator._lock.wait(self.coordinator.config['retry_backoff_ms'] / 1000)

            else:
                self.coordinator.heartbeat.sent_heartbeat()
                future = self.coordinator._send_heartbeat_request()
                future.add_callback(self._handle_heartbeat_success)
                future.add_errback(self._handle_heartbeat_failure)

    def _handle_heartbeat_success(self, result):
        with self.coordinator._lock:
            self.coordinator.heartbeat.received_heartbeat()

    def _handle_heartbeat_failure(self, exception):
        with self.coordinator._lock:
            if isinstance(exception, Errors.RebalanceInProgressError):
                # it is valid to continue heartbeating while the group is
                # rebalancing. This ensures that the coordinator keeps the
                # member in the group for as long as the duration of the
                # rebalance timeout. If we stop sending heartbeats, however,
                # then the session timeout may expire before we can rejoin.
+ self.coordinator.heartbeat.received_heartbeat() + else: + self.coordinator.heartbeat.fail_heartbeat() + # wake up the thread if it's sleeping to reschedule the heartbeat + self.coordinator._lock.notify() diff --git a/testbed/dpkp__kafka-python/kafka/coordinator/consumer.py b/testbed/dpkp__kafka-python/kafka/coordinator/consumer.py new file mode 100644 index 0000000000000000000000000000000000000000..971f5e8022d5c2caadda8f66be2be32e153e9cec --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/coordinator/consumer.py @@ -0,0 +1,833 @@ +from __future__ import absolute_import, division + +import collections +import copy +import functools +import logging +import time + +from kafka.vendor import six + +from kafka.coordinator.base import BaseCoordinator, Generation +from kafka.coordinator.assignors.range import RangePartitionAssignor +from kafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor +from kafka.coordinator.assignors.sticky.sticky_assignor import StickyPartitionAssignor +from kafka.coordinator.protocol import ConsumerProtocol +import kafka.errors as Errors +from kafka.future import Future +from kafka.metrics import AnonMeasurable +from kafka.metrics.stats import Avg, Count, Max, Rate +from kafka.protocol.commit import OffsetCommitRequest, OffsetFetchRequest +from kafka.structs import OffsetAndMetadata, TopicPartition +from kafka.util import WeakMethod + + +log = logging.getLogger(__name__) + + +class ConsumerCoordinator(BaseCoordinator): + """This class manages the coordination process with the consumer coordinator.""" + DEFAULT_CONFIG = { + 'group_id': 'kafka-python-default-group', + 'enable_auto_commit': True, + 'auto_commit_interval_ms': 5000, + 'default_offset_commit_callback': None, + 'assignors': (RangePartitionAssignor, RoundRobinPartitionAssignor, StickyPartitionAssignor), + 'session_timeout_ms': 10000, + 'heartbeat_interval_ms': 3000, + 'max_poll_interval_ms': 300000, + 'retry_backoff_ms': 100, + 'api_version': (0, 10, 1), + 
'exclude_internal_topics': True, + 'metric_group_prefix': 'consumer' + } + + def __init__(self, client, subscription, metrics, **configs): + """Initialize the coordination manager. + + Keyword Arguments: + group_id (str): name of the consumer group to join for dynamic + partition assignment (if enabled), and to use for fetching and + committing offsets. Default: 'kafka-python-default-group' + enable_auto_commit (bool): If true the consumer's offset will be + periodically committed in the background. Default: True. + auto_commit_interval_ms (int): milliseconds between automatic + offset commits, if enable_auto_commit is True. Default: 5000. + default_offset_commit_callback (callable): called as + callback(offsets, exception) response will be either an Exception + or None. This callback can be used to trigger custom actions when + a commit request completes. + assignors (list): List of objects to use to distribute partition + ownership amongst consumer instances when group management is + used. Default: [RangePartitionAssignor, RoundRobinPartitionAssignor] + heartbeat_interval_ms (int): The expected time in milliseconds + between heartbeats to the consumer coordinator when using + Kafka's group management feature. Heartbeats are used to ensure + that the consumer's session stays active and to facilitate + rebalancing when new consumers join or leave the group. The + value must be set lower than session_timeout_ms, but typically + should be set no higher than 1/3 of that value. It can be + adjusted even lower to control the expected time for normal + rebalances. Default: 3000 + session_timeout_ms (int): The timeout used to detect failures when + using Kafka's group management facilities. Default: 30000 + retry_backoff_ms (int): Milliseconds to backoff when retrying on + errors. Default: 100. + exclude_internal_topics (bool): Whether records from internal topics + (such as offsets) should be exposed to the consumer. 
If set to + True the only way to receive records from an internal topic is + subscribing to it. Requires 0.10+. Default: True + """ + super(ConsumerCoordinator, self).__init__(client, metrics, **configs) + + self.config = copy.copy(self.DEFAULT_CONFIG) + for key in self.config: + if key in configs: + self.config[key] = configs[key] + + self._subscription = subscription + self._is_leader = False + self._joined_subscription = set() + self._metadata_snapshot = self._build_metadata_snapshot(subscription, client.cluster) + self._assignment_snapshot = None + self._cluster = client.cluster + self.auto_commit_interval = self.config['auto_commit_interval_ms'] / 1000 + self.next_auto_commit_deadline = None + self.completed_offset_commits = collections.deque() + + if self.config['default_offset_commit_callback'] is None: + self.config['default_offset_commit_callback'] = self._default_offset_commit_callback + + if self.config['group_id'] is not None: + if self.config['api_version'] >= (0, 9): + if not self.config['assignors']: + raise Errors.KafkaConfigurationError('Coordinator requires assignors') + if self.config['api_version'] < (0, 10, 1): + if self.config['max_poll_interval_ms'] != self.config['session_timeout_ms']: + raise Errors.KafkaConfigurationError("Broker version %s does not support " + "different values for max_poll_interval_ms " + "and session_timeout_ms") + + if self.config['enable_auto_commit']: + if self.config['api_version'] < (0, 8, 1): + log.warning('Broker version (%s) does not support offset' + ' commits; disabling auto-commit.', + self.config['api_version']) + self.config['enable_auto_commit'] = False + elif self.config['group_id'] is None: + log.warning('group_id is None: disabling auto-commit.') + self.config['enable_auto_commit'] = False + else: + self.next_auto_commit_deadline = time.time() + self.auto_commit_interval + + self.consumer_sensors = ConsumerCoordinatorMetrics( + metrics, self.config['metric_group_prefix'], self._subscription) + + 
self._cluster.request_update() + self._cluster.add_listener(WeakMethod(self._handle_metadata_update)) + + def __del__(self): + if hasattr(self, '_cluster') and self._cluster: + self._cluster.remove_listener(WeakMethod(self._handle_metadata_update)) + super(ConsumerCoordinator, self).__del__() + + def protocol_type(self): + return ConsumerProtocol.PROTOCOL_TYPE + + def group_protocols(self): + """Returns list of preferred (protocols, metadata)""" + if self._subscription.subscription is None: + raise Errors.IllegalStateError('Consumer has not subscribed to topics') + # dpkp note: I really dislike this. + # why? because we are using this strange method group_protocols, + # which is seemingly innocuous, to set internal state (_joined_subscription) + # that is later used to check whether metadata has changed since we joined a group + # but there is no guarantee that this method, group_protocols, will get called + # in the correct sequence or that it will only be called when we want it to be. + # So this really should be moved elsewhere, but I don't have the energy to + # work that out right now. If you read this at some later date after the mutable + # state has bitten you... I'm sorry! It mimics the java client, and that's the + # best I've got for now. 
+ self._joined_subscription = set(self._subscription.subscription) + metadata_list = [] + for assignor in self.config['assignors']: + metadata = assignor.metadata(self._joined_subscription) + group_protocol = (assignor.name, metadata) + metadata_list.append(group_protocol) + return metadata_list + + def _handle_metadata_update(self, cluster): + # if we encounter any unauthorized topics, raise an exception + if cluster.unauthorized_topics: + raise Errors.TopicAuthorizationFailedError(cluster.unauthorized_topics) + + if self._subscription.subscribed_pattern: + topics = [] + for topic in cluster.topics(self.config['exclude_internal_topics']): + if self._subscription.subscribed_pattern.match(topic): + topics.append(topic) + + if set(topics) != self._subscription.subscription: + self._subscription.change_subscription(topics) + self._client.set_topics(self._subscription.group_subscription()) + + # check if there are any changes to the metadata which should trigger + # a rebalance + if self._subscription.partitions_auto_assigned(): + metadata_snapshot = self._build_metadata_snapshot(self._subscription, cluster) + if self._metadata_snapshot != metadata_snapshot: + self._metadata_snapshot = metadata_snapshot + + # If we haven't got group coordinator support, + # just assign all partitions locally + if self._auto_assign_all_partitions(): + self._subscription.assign_from_subscribed([ + TopicPartition(topic, partition) + for topic in self._subscription.subscription + for partition in self._metadata_snapshot[topic] + ]) + + def _auto_assign_all_partitions(self): + # For users that use "subscribe" without group support, + # we will simply assign all partitions to this consumer + if self.config['api_version'] < (0, 9): + return True + elif self.config['group_id'] is None: + return True + else: + return False + + def _build_metadata_snapshot(self, subscription, cluster): + metadata_snapshot = {} + for topic in subscription.group_subscription(): + partitions = 
cluster.partitions_for_topic(topic) or [] + metadata_snapshot[topic] = set(partitions) + return metadata_snapshot + + def _lookup_assignor(self, name): + for assignor in self.config['assignors']: + if assignor.name == name: + return assignor + return None + + def _on_join_complete(self, generation, member_id, protocol, + member_assignment_bytes): + # only the leader is responsible for monitoring for metadata changes + # (i.e. partition changes) + if not self._is_leader: + self._assignment_snapshot = None + + assignor = self._lookup_assignor(protocol) + assert assignor, 'Coordinator selected invalid assignment protocol: %s' % (protocol,) + + assignment = ConsumerProtocol.ASSIGNMENT.decode(member_assignment_bytes) + + # set the flag to refresh last committed offsets + self._subscription.needs_fetch_committed_offsets = True + + # update partition assignment + try: + self._subscription.assign_from_subscribed(assignment.partitions()) + except ValueError as e: + log.warning("%s. Probably due to a deleted topic. Requesting Re-join" % e) + self.request_rejoin() + + # give the assignor a chance to update internal state + # based on the received assignment + assignor.on_assignment(assignment) + if assignor.name == 'sticky': + assignor.on_generation_assignment(generation) + + # reschedule the auto commit starting from now + self.next_auto_commit_deadline = time.time() + self.auto_commit_interval + + assigned = set(self._subscription.assigned_partitions()) + log.info("Setting newly assigned partitions %s for group %s", + assigned, self.group_id) + + # execute the user's callback after rebalance + if self._subscription.listener: + try: + self._subscription.listener.on_partitions_assigned(assigned) + except Exception: + log.exception("User provided listener %s for group %s" + " failed on partition assignment: %s", + self._subscription.listener, self.group_id, + assigned) + + def poll(self): + """ + Poll for coordinator events. 
Only applicable if group_id is set, and + broker version supports GroupCoordinators. This ensures that the + coordinator is known, and if using automatic partition assignment, + ensures that the consumer has joined the group. This also handles + periodic offset commits if they are enabled. + """ + if self.group_id is None: + return + + self._invoke_completed_offset_commit_callbacks() + self.ensure_coordinator_ready() + + if self.config['api_version'] >= (0, 9) and self._subscription.partitions_auto_assigned(): + if self.need_rejoin(): + # due to a race condition between the initial metadata fetch and the + # initial rebalance, we need to ensure that the metadata is fresh + # before joining initially, and then request the metadata update. If + # metadata update arrives while the rebalance is still pending (for + # example, when the join group is still inflight), then we will lose + # track of the fact that we need to rebalance again to reflect the + # change to the topic subscription. Without ensuring that the + # metadata is fresh, any metadata update that changes the topic + # subscriptions and arrives while a rebalance is in progress will + # essentially be ignored. See KAFKA-3949 for the complete + # description of the problem. 
+ if self._subscription.subscribed_pattern: + metadata_update = self._client.cluster.request_update() + self._client.poll(future=metadata_update) + + self.ensure_active_group() + + self.poll_heartbeat() + + self._maybe_auto_commit_offsets_async() + + def time_to_next_poll(self): + """Return seconds (float) remaining until :meth:`.poll` should be called again""" + if not self.config['enable_auto_commit']: + return self.time_to_next_heartbeat() + + if time.time() > self.next_auto_commit_deadline: + return 0 + + return min(self.next_auto_commit_deadline - time.time(), + self.time_to_next_heartbeat()) + + def _perform_assignment(self, leader_id, assignment_strategy, members): + assignor = self._lookup_assignor(assignment_strategy) + assert assignor, 'Invalid assignment protocol: %s' % (assignment_strategy,) + member_metadata = {} + all_subscribed_topics = set() + for member_id, metadata_bytes in members: + metadata = ConsumerProtocol.METADATA.decode(metadata_bytes) + member_metadata[member_id] = metadata + all_subscribed_topics.update(metadata.subscription) # pylint: disable-msg=no-member + + # the leader will begin watching for changes to any of the topics + # the group is interested in, which ensures that all metadata changes + # will eventually be seen + # Because assignment typically happens within response callbacks, + # we cannot block on metadata updates here (no recursion into poll()) + self._subscription.group_subscribe(all_subscribed_topics) + self._client.set_topics(self._subscription.group_subscription()) + + # keep track of the metadata used for assignment so that we can check + # after rebalance completion whether anything has changed + self._cluster.request_update() + self._is_leader = True + self._assignment_snapshot = self._metadata_snapshot + + log.debug("Performing assignment for group %s using strategy %s" + " with subscriptions %s", self.group_id, assignor.name, + member_metadata) + + assignments = assignor.assign(self._cluster, member_metadata) + 
+ log.debug("Finished assignment for group %s: %s", self.group_id, assignments) + + group_assignment = {} + for member_id, assignment in six.iteritems(assignments): + group_assignment[member_id] = assignment + return group_assignment + + def _on_join_prepare(self, generation, member_id): + # commit offsets prior to rebalance if auto-commit enabled + self._maybe_auto_commit_offsets_sync() + + # execute the user's callback before rebalance + log.info("Revoking previously assigned partitions %s for group %s", + self._subscription.assigned_partitions(), self.group_id) + if self._subscription.listener: + try: + revoked = set(self._subscription.assigned_partitions()) + self._subscription.listener.on_partitions_revoked(revoked) + except Exception: + log.exception("User provided subscription listener %s" + " for group %s failed on_partitions_revoked", + self._subscription.listener, self.group_id) + + self._is_leader = False + self._subscription.reset_group_subscription() + + def need_rejoin(self): + """Check whether the group should be rejoined + + Returns: + bool: True if consumer should rejoin group, False otherwise + """ + if not self._subscription.partitions_auto_assigned(): + return False + + if self._auto_assign_all_partitions(): + return False + + # we need to rejoin if we performed the assignment and metadata has changed + if (self._assignment_snapshot is not None + and self._assignment_snapshot != self._metadata_snapshot): + return True + + # we need to join if our subscription has changed since the last join + if (self._joined_subscription is not None + and self._joined_subscription != self._subscription.subscription): + return True + + return super(ConsumerCoordinator, self).need_rejoin() + + def refresh_committed_offsets_if_needed(self): + """Fetch committed offsets for assigned partitions.""" + if self._subscription.needs_fetch_committed_offsets: + offsets = self.fetch_committed_offsets(self._subscription.assigned_partitions()) + for partition, offset in 
six.iteritems(offsets): + # verify assignment is still active + if self._subscription.is_assigned(partition): + self._subscription.assignment[partition].committed = offset + self._subscription.needs_fetch_committed_offsets = False + + def fetch_committed_offsets(self, partitions): + """Fetch the current committed offsets for specified partitions + + Arguments: + partitions (list of TopicPartition): partitions to fetch + + Returns: + dict: {TopicPartition: OffsetAndMetadata} + """ + if not partitions: + return {} + + while True: + self.ensure_coordinator_ready() + + # contact coordinator to fetch committed offsets + future = self._send_offset_fetch_request(partitions) + self._client.poll(future=future) + + if future.succeeded(): + return future.value + + if not future.retriable(): + raise future.exception # pylint: disable-msg=raising-bad-type + + time.sleep(self.config['retry_backoff_ms'] / 1000) + + def close(self, autocommit=True): + """Close the coordinator, leave the current group, + and reset local generation / member_id. + + Keyword Arguments: + autocommit (bool): If auto-commit is configured for this consumer, + this optional flag causes the consumer to attempt to commit any + pending consumed offsets prior to close. Default: True + """ + try: + if autocommit: + self._maybe_auto_commit_offsets_sync() + finally: + super(ConsumerCoordinator, self).close() + + def _invoke_completed_offset_commit_callbacks(self): + while self.completed_offset_commits: + callback, offsets, exception = self.completed_offset_commits.popleft() + callback(offsets, exception) + + def commit_offsets_async(self, offsets, callback=None): + """Commit specific offsets asynchronously. + + Arguments: + offsets (dict {TopicPartition: OffsetAndMetadata}): what to commit + callback (callable, optional): called as callback(offsets, response) + response will be either an Exception or a OffsetCommitResponse + struct. 
This callback can be used to trigger custom actions when + a commit request completes. + + Returns: + kafka.future.Future + """ + self._invoke_completed_offset_commit_callbacks() + if not self.coordinator_unknown(): + future = self._do_commit_offsets_async(offsets, callback) + else: + # we don't know the current coordinator, so try to find it and then + # send the commit or fail (we don't want recursive retries which can + # cause offset commits to arrive out of order). Note that there may + # be multiple offset commits chained to the same coordinator lookup + # request. This is fine because the listeners will be invoked in the + # same order that they were added. Note also that BaseCoordinator + # prevents multiple concurrent coordinator lookup requests. + future = self.lookup_coordinator() + future.add_callback(lambda r: functools.partial(self._do_commit_offsets_async, offsets, callback)()) + if callback: + future.add_errback(lambda e: self.completed_offset_commits.appendleft((callback, offsets, e))) + + # ensure the commit has a chance to be transmitted (without blocking on + # its completion). Note that commits are treated as heartbeats by the + # coordinator, so there is no need to explicitly allow heartbeats + # through delayed task execution. 
+ self._client.poll(timeout_ms=0) # no wakeup if we add that feature + + return future + + def _do_commit_offsets_async(self, offsets, callback=None): + assert self.config['api_version'] >= (0, 8, 1), 'Unsupported Broker API' + assert all(map(lambda k: isinstance(k, TopicPartition), offsets)) + assert all(map(lambda v: isinstance(v, OffsetAndMetadata), + offsets.values())) + if callback is None: + callback = self.config['default_offset_commit_callback'] + self._subscription.needs_fetch_committed_offsets = True + future = self._send_offset_commit_request(offsets) + future.add_both(lambda res: self.completed_offset_commits.appendleft((callback, offsets, res))) + return future + + def commit_offsets_sync(self, offsets): + """Commit specific offsets synchronously. + + This method will retry until the commit completes successfully or an + unrecoverable error is encountered. + + Arguments: + offsets (dict {TopicPartition: OffsetAndMetadata}): what to commit + + Raises error on failure + """ + assert self.config['api_version'] >= (0, 8, 1), 'Unsupported Broker API' + assert all(map(lambda k: isinstance(k, TopicPartition), offsets)) + assert all(map(lambda v: isinstance(v, OffsetAndMetadata), + offsets.values())) + self._invoke_completed_offset_commit_callbacks() + if not offsets: + return + + while True: + self.ensure_coordinator_ready() + + future = self._send_offset_commit_request(offsets) + self._client.poll(future=future) + + if future.succeeded(): + return future.value + + if not future.retriable(): + raise future.exception # pylint: disable-msg=raising-bad-type + + time.sleep(self.config['retry_backoff_ms'] / 1000) + + def _maybe_auto_commit_offsets_sync(self): + if self.config['enable_auto_commit']: + try: + self.commit_offsets_sync(self._subscription.all_consumed_offsets()) + + # The three main group membership errors are known and should not + # require a stacktrace -- just a warning + except (Errors.UnknownMemberIdError, + Errors.IllegalGenerationError, + 
Errors.RebalanceInProgressError): + log.warning("Offset commit failed: group membership out of date" + " This is likely to cause duplicate message" + " delivery.") + except Exception: + log.exception("Offset commit failed: This is likely to cause" + " duplicate message delivery") + + def _send_offset_commit_request(self, offsets): + """Commit offsets for the specified list of topics and partitions. + + This is a non-blocking call which returns a request future that can be + polled in the case of a synchronous commit or ignored in the + asynchronous case. + + Arguments: + offsets (dict of {TopicPartition: OffsetAndMetadata}): what should + be committed + + Returns: + Future: indicating whether the commit was successful or not + """ + assert self.config['api_version'] >= (0, 8, 1), 'Unsupported Broker API' + assert all(map(lambda k: isinstance(k, TopicPartition), offsets)) + assert all(map(lambda v: isinstance(v, OffsetAndMetadata), + offsets.values())) + if not offsets: + log.debug('No offsets to commit') + return Future().success(None) + + node_id = self.coordinator() + if node_id is None: + return Future().failure(Errors.GroupCoordinatorNotAvailableError) + + + # create the offset commit request + offset_data = collections.defaultdict(dict) + for tp, offset in six.iteritems(offsets): + offset_data[tp.topic][tp.partition] = offset + + if self._subscription.partitions_auto_assigned(): + generation = self.generation() + else: + generation = Generation.NO_GENERATION + + # if the generation is None, we are not part of an active group + # (and we expect to be). 
The only thing we can do is fail the commit + # and let the user rejoin the group in poll() + if self.config['api_version'] >= (0, 9) and generation is None: + return Future().failure(Errors.CommitFailedError()) + + if self.config['api_version'] >= (0, 9): + request = OffsetCommitRequest[2]( + self.group_id, + generation.generation_id, + generation.member_id, + OffsetCommitRequest[2].DEFAULT_RETENTION_TIME, + [( + topic, [( + partition, + offset.offset, + offset.metadata + ) for partition, offset in six.iteritems(partitions)] + ) for topic, partitions in six.iteritems(offset_data)] + ) + elif self.config['api_version'] >= (0, 8, 2): + request = OffsetCommitRequest[1]( + self.group_id, -1, '', + [( + topic, [( + partition, + offset.offset, + -1, + offset.metadata + ) for partition, offset in six.iteritems(partitions)] + ) for topic, partitions in six.iteritems(offset_data)] + ) + elif self.config['api_version'] >= (0, 8, 1): + request = OffsetCommitRequest[0]( + self.group_id, + [( + topic, [( + partition, + offset.offset, + offset.metadata + ) for partition, offset in six.iteritems(partitions)] + ) for topic, partitions in six.iteritems(offset_data)] + ) + + log.debug("Sending offset-commit request with %s for group %s to %s", + offsets, self.group_id, node_id) + + future = Future() + _f = self._client.send(node_id, request) + _f.add_callback(self._handle_offset_commit_response, offsets, future, time.time()) + _f.add_errback(self._failed_request, node_id, request, future) + return future + + def _handle_offset_commit_response(self, offsets, future, send_time, response): + # TODO look at adding request_latency_ms to response (like java kafka) + self.consumer_sensors.commit_latency.record((time.time() - send_time) * 1000) + unauthorized_topics = set() + + for topic, partitions in response.topics: + for partition, error_code in partitions: + tp = TopicPartition(topic, partition) + offset = offsets[tp] + + error_type = Errors.for_code(error_code) + if error_type is 
Errors.NoError: + log.debug("Group %s committed offset %s for partition %s", + self.group_id, offset, tp) + if self._subscription.is_assigned(tp): + self._subscription.assignment[tp].committed = offset + elif error_type is Errors.GroupAuthorizationFailedError: + log.error("Not authorized to commit offsets for group %s", + self.group_id) + future.failure(error_type(self.group_id)) + return + elif error_type is Errors.TopicAuthorizationFailedError: + unauthorized_topics.add(topic) + elif error_type in (Errors.OffsetMetadataTooLargeError, + Errors.InvalidCommitOffsetSizeError): + # raise the error to the user + log.debug("OffsetCommit for group %s failed on partition %s" + " %s", self.group_id, tp, error_type.__name__) + future.failure(error_type()) + return + elif error_type is Errors.GroupLoadInProgressError: + # just retry + log.debug("OffsetCommit for group %s failed: %s", + self.group_id, error_type.__name__) + future.failure(error_type(self.group_id)) + return + elif error_type in (Errors.GroupCoordinatorNotAvailableError, + Errors.NotCoordinatorForGroupError, + Errors.RequestTimedOutError): + log.debug("OffsetCommit for group %s failed: %s", + self.group_id, error_type.__name__) + self.coordinator_dead(error_type()) + future.failure(error_type(self.group_id)) + return + elif error_type in (Errors.UnknownMemberIdError, + Errors.IllegalGenerationError, + Errors.RebalanceInProgressError): + # need to re-join group + error = error_type(self.group_id) + log.debug("OffsetCommit for group %s failed: %s", + self.group_id, error) + self.reset_generation() + future.failure(Errors.CommitFailedError()) + return + else: + log.error("Group %s failed to commit partition %s at offset" + " %s: %s", self.group_id, tp, offset, + error_type.__name__) + future.failure(error_type()) + return + + if unauthorized_topics: + log.error("Not authorized to commit to topics %s for group %s", + unauthorized_topics, self.group_id) + 
future.failure(Errors.TopicAuthorizationFailedError(unauthorized_topics)) + else: + future.success(None) + + def _send_offset_fetch_request(self, partitions): + """Fetch the committed offsets for a set of partitions. + + This is a non-blocking call. The returned future can be polled to get + the actual offsets returned from the broker. + + Arguments: + partitions (list of TopicPartition): the partitions to fetch + + Returns: + Future: resolves to dict of offsets: {TopicPartition: OffsetAndMetadata} + """ + assert self.config['api_version'] >= (0, 8, 1), 'Unsupported Broker API' + assert all(map(lambda k: isinstance(k, TopicPartition), partitions)) + if not partitions: + return Future().success({}) + + node_id = self.coordinator() + if node_id is None: + return Future().failure(Errors.GroupCoordinatorNotAvailableError) + + # Verify node is ready + if not self._client.ready(node_id): + log.debug("Node %s not ready -- failing offset fetch request", + node_id) + return Future().failure(Errors.NodeNotReadyError) + + log.debug("Group %s fetching committed offsets for partitions: %s", + self.group_id, partitions) + # construct the request + topic_partitions = collections.defaultdict(set) + for tp in partitions: + topic_partitions[tp.topic].add(tp.partition) + + if self.config['api_version'] >= (0, 8, 2): + request = OffsetFetchRequest[1]( + self.group_id, + list(topic_partitions.items()) + ) + else: + request = OffsetFetchRequest[0]( + self.group_id, + list(topic_partitions.items()) + ) + + # send the request with a callback + future = Future() + _f = self._client.send(node_id, request) + _f.add_callback(self._handle_offset_fetch_response, future) + _f.add_errback(self._failed_request, node_id, request, future) + return future + + def _handle_offset_fetch_response(self, future, response): + offsets = {} + for topic, partitions in response.topics: + for partition, offset, metadata, error_code in partitions: + tp = TopicPartition(topic, partition) + error_type = 
Errors.for_code(error_code) + if error_type is not Errors.NoError: + error = error_type() + log.debug("Group %s failed to fetch offset for partition" + " %s: %s", self.group_id, tp, error) + if error_type is Errors.GroupLoadInProgressError: + # just retry + future.failure(error) + elif error_type is Errors.NotCoordinatorForGroupError: + # re-discover the coordinator and retry + self.coordinator_dead(error_type()) + future.failure(error) + elif error_type is Errors.UnknownTopicOrPartitionError: + log.warning("OffsetFetchRequest -- unknown topic %s" + " (have you committed any offsets yet?)", + topic) + continue + else: + log.error("Unknown error fetching offsets for %s: %s", + tp, error) + future.failure(error) + return + elif offset >= 0: + # record the position with the offset + # (-1 indicates no committed offset to fetch) + offsets[tp] = OffsetAndMetadata(offset, metadata) + else: + log.debug("Group %s has no committed offset for partition" + " %s", self.group_id, tp) + future.success(offsets) + + def _default_offset_commit_callback(self, offsets, exception): + if exception is not None: + log.error("Offset commit failed: %s", exception) + + def _commit_offsets_async_on_complete(self, offsets, exception): + if exception is not None: + log.warning("Auto offset commit failed for group %s: %s", + self.group_id, exception) + if getattr(exception, 'retriable', False): + self.next_auto_commit_deadline = min(time.time() + self.config['retry_backoff_ms'] / 1000, self.next_auto_commit_deadline) + else: + log.debug("Completed autocommit of offsets %s for group %s", + offsets, self.group_id) + + def _maybe_auto_commit_offsets_async(self): + if self.config['enable_auto_commit']: + if self.coordinator_unknown(): + self.next_auto_commit_deadline = time.time() + self.config['retry_backoff_ms'] / 1000 + elif time.time() > self.next_auto_commit_deadline: + self.next_auto_commit_deadline = time.time() + self.auto_commit_interval + 
self.commit_offsets_async(self._subscription.all_consumed_offsets(), + self._commit_offsets_async_on_complete) + + +class ConsumerCoordinatorMetrics(object): + def __init__(self, metrics, metric_group_prefix, subscription): + self.metrics = metrics + self.metric_group_name = '%s-coordinator-metrics' % (metric_group_prefix,) + + self.commit_latency = metrics.sensor('commit-latency') + self.commit_latency.add(metrics.metric_name( + 'commit-latency-avg', self.metric_group_name, + 'The average time taken for a commit request'), Avg()) + self.commit_latency.add(metrics.metric_name( + 'commit-latency-max', self.metric_group_name, + 'The max time taken for a commit request'), Max()) + self.commit_latency.add(metrics.metric_name( + 'commit-rate', self.metric_group_name, + 'The number of commit calls per second'), Rate(sampled_stat=Count())) + + num_parts = AnonMeasurable(lambda config, now: + len(subscription.assigned_partitions())) + metrics.add_metric(metrics.metric_name( + 'assigned-partitions', self.metric_group_name, + 'The number of partitions currently assigned to this consumer'), + num_parts) diff --git a/testbed/dpkp__kafka-python/kafka/coordinator/heartbeat.py b/testbed/dpkp__kafka-python/kafka/coordinator/heartbeat.py new file mode 100644 index 0000000000000000000000000000000000000000..2f5930b63ff3fd9754c1d55f61b0b4a40eb7f924 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/coordinator/heartbeat.py @@ -0,0 +1,68 @@ +from __future__ import absolute_import, division + +import copy +import time + + +class Heartbeat(object): + DEFAULT_CONFIG = { + 'group_id': None, + 'heartbeat_interval_ms': 3000, + 'session_timeout_ms': 10000, + 'max_poll_interval_ms': 300000, + 'retry_backoff_ms': 100, + } + + def __init__(self, **configs): + self.config = copy.copy(self.DEFAULT_CONFIG) + for key in self.config: + if key in configs: + self.config[key] = configs[key] + + if self.config['group_id'] is not None: + assert (self.config['heartbeat_interval_ms'] + <= 
self.config['session_timeout_ms']), ( + 'Heartbeat interval must be lower than the session timeout') + + self.last_send = -1 * float('inf') + self.last_receive = -1 * float('inf') + self.last_poll = -1 * float('inf') + self.last_reset = time.time() + self.heartbeat_failed = None + + def poll(self): + self.last_poll = time.time() + + def sent_heartbeat(self): + self.last_send = time.time() + self.heartbeat_failed = False + + def fail_heartbeat(self): + self.heartbeat_failed = True + + def received_heartbeat(self): + self.last_receive = time.time() + + def time_to_next_heartbeat(self): + """Returns seconds (float) remaining before next heartbeat should be sent""" + time_since_last_heartbeat = time.time() - max(self.last_send, self.last_reset) + if self.heartbeat_failed: + delay_to_next_heartbeat = self.config['retry_backoff_ms'] / 1000 + else: + delay_to_next_heartbeat = self.config['heartbeat_interval_ms'] / 1000 + return max(0, delay_to_next_heartbeat - time_since_last_heartbeat) + + def should_heartbeat(self): + return self.time_to_next_heartbeat() == 0 + + def session_timeout_expired(self): + last_recv = max(self.last_receive, self.last_reset) + return (time.time() - last_recv) > (self.config['session_timeout_ms'] / 1000) + + def reset_timeouts(self): + self.last_reset = time.time() + self.last_poll = time.time() + self.heartbeat_failed = False + + def poll_timeout_expired(self): + return (time.time() - self.last_poll) > (self.config['max_poll_interval_ms'] / 1000) diff --git a/testbed/dpkp__kafka-python/kafka/coordinator/protocol.py b/testbed/dpkp__kafka-python/kafka/coordinator/protocol.py new file mode 100644 index 0000000000000000000000000000000000000000..56a39015912a0ac7a85d8463a2da8a873a857030 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/coordinator/protocol.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import + +from kafka.protocol.struct import Struct +from kafka.protocol.types import Array, Bytes, Int16, Int32, Schema, String +from 
kafka.structs import TopicPartition + + +class ConsumerProtocolMemberMetadata(Struct): + SCHEMA = Schema( + ('version', Int16), + ('subscription', Array(String('utf-8'))), + ('user_data', Bytes)) + + +class ConsumerProtocolMemberAssignment(Struct): + SCHEMA = Schema( + ('version', Int16), + ('assignment', Array( + ('topic', String('utf-8')), + ('partitions', Array(Int32)))), + ('user_data', Bytes)) + + def partitions(self): + return [TopicPartition(topic, partition) + for topic, partitions in self.assignment # pylint: disable-msg=no-member + for partition in partitions] + + +class ConsumerProtocol(object): + PROTOCOL_TYPE = 'consumer' + ASSIGNMENT_STRATEGIES = ('range', 'roundrobin') + METADATA = ConsumerProtocolMemberMetadata + ASSIGNMENT = ConsumerProtocolMemberAssignment diff --git a/testbed/dpkp__kafka-python/kafka/metrics/__init__.py b/testbed/dpkp__kafka-python/kafka/metrics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2a62d633435b1f8fd37ee817c3c8623e1e89a3bc --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/__init__.py @@ -0,0 +1,15 @@ +from __future__ import absolute_import + +from kafka.metrics.compound_stat import NamedMeasurable +from kafka.metrics.dict_reporter import DictReporter +from kafka.metrics.kafka_metric import KafkaMetric +from kafka.metrics.measurable import AnonMeasurable +from kafka.metrics.metric_config import MetricConfig +from kafka.metrics.metric_name import MetricName +from kafka.metrics.metrics import Metrics +from kafka.metrics.quota import Quota + +__all__ = [ + 'AnonMeasurable', 'DictReporter', 'KafkaMetric', 'MetricConfig', + 'MetricName', 'Metrics', 'NamedMeasurable', 'Quota' +] diff --git a/testbed/dpkp__kafka-python/kafka/metrics/compound_stat.py b/testbed/dpkp__kafka-python/kafka/metrics/compound_stat.py new file mode 100644 index 0000000000000000000000000000000000000000..ac92480dc591a7f964cfcec766f2f1509d94f4e4 --- /dev/null +++ 
b/testbed/dpkp__kafka-python/kafka/metrics/compound_stat.py @@ -0,0 +1,34 @@ +from __future__ import absolute_import + +import abc + +from kafka.metrics.stat import AbstractStat + + +class AbstractCompoundStat(AbstractStat): + """ + A compound stat is a stat where a single measurement and associated + data structure feeds many metrics. This is the example for a + histogram which has many associated percentiles. + """ + __metaclass__ = abc.ABCMeta + + def stats(self): + """ + Return list of NamedMeasurable + """ + raise NotImplementedError + + +class NamedMeasurable(object): + def __init__(self, metric_name, measurable_stat): + self._name = metric_name + self._stat = measurable_stat + + @property + def name(self): + return self._name + + @property + def stat(self): + return self._stat diff --git a/testbed/dpkp__kafka-python/kafka/metrics/dict_reporter.py b/testbed/dpkp__kafka-python/kafka/metrics/dict_reporter.py new file mode 100644 index 0000000000000000000000000000000000000000..0b98fe1e4116c6d36954396af2b34a9f277f3c4a --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/dict_reporter.py @@ -0,0 +1,83 @@ +from __future__ import absolute_import + +import logging +import threading + +from kafka.metrics.metrics_reporter import AbstractMetricsReporter + +logger = logging.getLogger(__name__) + + +class DictReporter(AbstractMetricsReporter): + """A basic dictionary based metrics reporter. + + Store all metrics in a two level dictionary of category > name > metric. + """ + def __init__(self, prefix=''): + self._lock = threading.Lock() + self._prefix = prefix if prefix else '' # never allow None + self._store = {} + + def snapshot(self): + """ + Return a nested dictionary snapshot of all metrics and their + values at this time. 
Example: + { + 'category': { + 'metric1_name': 42.0, + 'metric2_name': 'foo' + } + } + """ + return dict((category, dict((name, metric.value()) + for name, metric in list(metrics.items()))) + for category, metrics in + list(self._store.items())) + + def init(self, metrics): + for metric in metrics: + self.metric_change(metric) + + def metric_change(self, metric): + with self._lock: + category = self.get_category(metric) + if category not in self._store: + self._store[category] = {} + self._store[category][metric.metric_name.name] = metric + + def metric_removal(self, metric): + with self._lock: + category = self.get_category(metric) + metrics = self._store.get(category, {}) + removed = metrics.pop(metric.metric_name.name, None) + if not metrics: + self._store.pop(category, None) + return removed + + def get_category(self, metric): + """ + Return a string category for the metric. + + The category is made up of this reporter's prefix and the + metric's group and tags. + + Examples: + prefix = 'foo', group = 'bar', tags = {'a': 1, 'b': 2} + returns: 'foo.bar.a=1,b=2' + + prefix = 'foo', group = 'bar', tags = None + returns: 'foo.bar' + + prefix = None, group = 'bar', tags = None + returns: 'bar' + """ + tags = ','.join('%s=%s' % (k, v) for k, v in + sorted(metric.metric_name.tags.items())) + return '.'.join(x for x in + [self._prefix, metric.metric_name.group, tags] if x) + + def configure(self, configs): + pass + + def close(self): + pass diff --git a/testbed/dpkp__kafka-python/kafka/metrics/kafka_metric.py b/testbed/dpkp__kafka-python/kafka/metrics/kafka_metric.py new file mode 100644 index 0000000000000000000000000000000000000000..9fb8d89f12740246b0beabf1af66484fceb59a2a --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/kafka_metric.py @@ -0,0 +1,36 @@ +from __future__ import absolute_import + +import time + + +class KafkaMetric(object): + # NOTE java constructor takes a lock instance + def __init__(self, metric_name, measurable, config): + if not 
class KafkaMetric(object):
    """Binds a metric name to the measurable and config used to evaluate it."""
    # NOTE java constructor takes a lock instance

    def __init__(self, metric_name, measurable, config):
        """
        Arguments:
            metric_name (MetricName): identifies this metric
            measurable (AbstractMeasurable): produces the metric's value
            config (MetricConfig): configuration used when measuring
        """
        if not metric_name:
            raise ValueError('metric_name must be non-empty')
        if not measurable:
            raise ValueError('measurable must be non-empty')
        self._metric_name = metric_name
        self._measurable = measurable
        self._config = config

    @property
    def metric_name(self):
        return self._metric_name

    @property
    def measurable(self):
        return self._measurable

    @property
    def config(self):
        return self._config

    @config.setter
    def config(self, config):
        self._config = config

    def value(self, time_ms=None):
        """Measure this metric; time_ms defaults to the current epoch millis."""
        when = time.time() * 1000 if time_ms is None else time_ms
        return self.measurable.measure(self.config, when)
+ """ + __metaclass__ = abc.ABCMeta diff --git a/testbed/dpkp__kafka-python/kafka/metrics/metric_config.py b/testbed/dpkp__kafka-python/kafka/metrics/metric_config.py new file mode 100644 index 0000000000000000000000000000000000000000..2e55abfcbdb2c262eaf3d6062ba994e7eafb66e5 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/metric_config.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import + +import sys + + +class MetricConfig(object): + """Configuration values for metrics""" + def __init__(self, quota=None, samples=2, event_window=sys.maxsize, + time_window_ms=30 * 1000, tags=None): + """ + Arguments: + quota (Quota, optional): Upper or lower bound of a value. + samples (int, optional): Max number of samples kept per metric. + event_window (int, optional): Max number of values per sample. + time_window_ms (int, optional): Max age of an individual sample. + tags (dict of {str: str}, optional): Tags for each metric. + """ + self.quota = quota + self._samples = samples + self.event_window = event_window + self.time_window_ms = time_window_ms + # tags should be OrderedDict (not supported in py26) + self.tags = tags if tags else {} + + @property + def samples(self): + return self._samples + + @samples.setter + def samples(self, value): + if value < 1: + raise ValueError('The number of samples must be at least 1.') + self._samples = value diff --git a/testbed/dpkp__kafka-python/kafka/metrics/metric_name.py b/testbed/dpkp__kafka-python/kafka/metrics/metric_name.py new file mode 100644 index 0000000000000000000000000000000000000000..b5acd1662cc42d2e2b1d62b80235d3b514c5b9ae --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/metric_name.py @@ -0,0 +1,106 @@ +from __future__ import absolute_import + +import copy + + +class MetricName(object): + """ + This class encapsulates a metric's name, logical group and its + related attributes (tags). + + group, tags parameters can be used to create unique metric names. + e.g. 
class MetricName(object):
    """
    This class encapsulates a metric's name, logical group and its
    related attributes (tags).

    group, tags parameters can be used to create unique metric names.
    e.g. domainName:type=group,key1=val1,key2=val2

    Usage looks something like this:

        # set up metrics:
        metric_tags = {'client-id': 'producer-1', 'topic': 'topic'}
        metric_config = MetricConfig(tags=metric_tags)

        # metrics is the global repository of metrics and sensors
        metrics = Metrics(metric_config)

        sensor = metrics.sensor('message-sizes')
        metric_name = metrics.metric_name('message-size-avg',
                                          'producer-metrics',
                                          'average message size')
        sensor.add(metric_name, Avg())

        # as messages are sent we record the sizes
        sensor.record(message_size)
    """

    def __init__(self, name, group, description=None, tags=None):
        """
        Arguments:
            name (str): The name of the metric.
            group (str): The logical group name of the metrics to which this
                metric belongs.
            description (str, optional): A human-readable description to
                include in the metric.
            tags (dict, optional): Additional key/val attributes of the metric.

        Raises:
            ValueError: if name/group are empty or tags is not a dict.
        """
        if not (name and group):
            raise ValueError('name and group must be non-empty.')
        if tags is not None and not isinstance(tags, dict):
            raise ValueError('tags must be a dict if present.')

        self._name = name
        self._group = group
        self._description = description
        # defensive copy so later mutation of the caller's dict has no effect
        self._tags = copy.copy(tags)
        self._hash = 0  # lazily computed and cached by __hash__

    @property
    def name(self):
        return self._name

    @property
    def group(self):
        return self._group

    @property
    def description(self):
        return self._description

    @property
    def tags(self):
        # return a copy so callers cannot mutate internal state
        return copy.copy(self._tags)

    def __hash__(self):
        # cache the Java-style 31-based hash; 0 means "not yet computed"
        if self._hash == 0:
            tags_hash = hash(frozenset(self.tags.items())) if self.tags else 0
            acc = 1
            for part in (hash(self.group), hash(self.name), tags_hash):
                acc = 31 * acc + part
            self._hash = acc
        return self._hash

    def __eq__(self, other):
        if self is other:
            return True
        if other is None:
            return False
        # description intentionally excluded from equality
        return (type(self) == type(other) and
                self.group == other.group and
                self.name == other.name and
                self.tags == other.tags)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return 'MetricName(name=%s, group=%s, description=%s, tags=%s)' % (
            self.name, self.group, self.description, self.tags)
class Metrics(object):
    """
    A registry of sensors and metrics.

    A metric is a named, numerical measurement. A sensor is a handle to
    record numerical measurements as they occur. Each Sensor has zero or
    more associated metrics. For example a Sensor might represent message
    sizes and we might associate with this sensor a metric for the average,
    maximum, or other statistics computed off the sequence of message sizes
    that are recorded by the sensor.

    Usage looks something like this:

        # set up metrics:
        metrics = Metrics()  # the global repository of metrics and sensors
        sensor = metrics.sensor('message-sizes')
        metric_name = MetricName('message-size-avg', 'producer-metrics')
        sensor.add(metric_name, Avg())
        metric_name = MetricName('message-size-max', 'producer-metrics')
        sensor.add(metric_name, Max())

        # as messages are sent we record the sizes
        sensor.record(message_size)
    """
    def __init__(self, default_config=None, reporters=None,
                 enable_expiration=False):
        """
        Create a metrics repository with a default config, given metric
        reporters and the ability to expire eligible sensors

        Arguments:
            default_config (MetricConfig, optional): The default config
            reporters (list of AbstractMetricsReporter, optional):
                The metrics reporters
            enable_expiration (bool, optional): true if the metrics instance
                can garbage collect inactive sensors, false otherwise
        """
        # RLock because public methods re-enter other locked methods on the
        # same thread (e.g. remove_sensor -> remove_metric).
        self._lock = threading.RLock()
        self._config = default_config or MetricConfig()
        self._sensors = {}            # name -> Sensor
        self._metrics = {}            # MetricName -> KafkaMetric
        self._children_sensors = {}   # parent Sensor -> [child Sensor, ...]
        self._reporters = reporters or []
        # reporters start with no metrics; they are notified via
        # metric_change as metrics get registered
        for reporter in self._reporters:
            reporter.init([])

        if enable_expiration:
            def expire_loop():
                while True:
                    # delay 30 seconds
                    time.sleep(30)
                    self.ExpireSensorTask.run(self)
            metrics_scheduler = threading.Thread(target=expire_loop)
            # Creating a daemon thread to not block shutdown
            metrics_scheduler.daemon = True
            metrics_scheduler.start()

        # Bootstrap metric reporting the number of registered metrics.
        self.add_metric(self.metric_name('count', 'kafka-metrics-count',
                                         'total number of registered metrics'),
                        AnonMeasurable(lambda config, now: len(self._metrics)))

    @property
    def config(self):
        # Default MetricConfig used when a sensor/metric supplies none.
        return self._config

    @property
    def metrics(self):
        """
        Get all the metrics currently maintained and indexed by metricName

        NOTE: this returns the live internal dict, not a copy.
        """
        return self._metrics

    def metric_name(self, name, group, description='', tags=None):
        """
        Create a MetricName with the given name, group, description and tags,
        plus default tags specified in the metric configuration.
        Tag in tags takes precedence if the same tag key is specified in
        the default metric configuration.

        Arguments:
            name (str): The name of the metric
            group (str): logical group name of the metrics to which this
                metric belongs
            description (str, optional): A human-readable description to
                include in the metric
            tags (dict, optional): additional key/value attributes of
                the metric

        Returns:
            MetricName: combining the arguments with the config's default tags
        """
        combined_tags = dict(self.config.tags)
        combined_tags.update(tags or {})
        return MetricName(name, group, description, combined_tags)

    def get_sensor(self, name):
        """
        Get the sensor with the given name if it exists

        Arguments:
            name (str): The name of the sensor

        Returns:
            Sensor: The sensor or None if no such sensor exists

        Raises:
            ValueError: if name is empty or None
        """
        if not name:
            raise ValueError('name must be non-empty')
        return self._sensors.get(name, None)

    def sensor(self, name, config=None,
               inactive_sensor_expiration_time_seconds=sys.maxsize,
               parents=None):
        """
        Get or create a sensor with the given unique name and zero or
        more parent sensors. All parent sensors will receive every value
        recorded with this sensor.

        Arguments:
            name (str): The name of the sensor
            config (MetricConfig, optional): A default configuration to use
                for this sensor for metrics that don't have their own config
            inactive_sensor_expiration_time_seconds (int, optional):
                If no value is recorded on the Sensor for this duration of
                time, it is eligible for removal
            parents (list of Sensor): The parent sensors

        Returns:
            Sensor: The sensor that is created
        """
        # Fast path: skip the lock when the sensor already exists.
        sensor = self.get_sensor(name)
        if sensor:
            return sensor

        with self._lock:
            # Re-check under the lock: another thread may have created it.
            sensor = self.get_sensor(name)
            if not sensor:
                sensor = Sensor(self, name, parents, config or self.config,
                                inactive_sensor_expiration_time_seconds)
                self._sensors[name] = sensor
                if parents:
                    # register this sensor as a child of each parent so that
                    # removing a parent also removes its children
                    for parent in parents:
                        children = self._children_sensors.get(parent)
                        if not children:
                            children = []
                            self._children_sensors[parent] = children
                        children.append(sensor)
                logger.debug('Added sensor with name %s', name)
            return sensor

    def remove_sensor(self, name):
        """
        Remove a sensor (if it exists), associated metrics and its children.

        Arguments:
            name (str): The name of the sensor to be removed
        """
        sensor = self._sensors.get(name)
        if sensor:
            child_sensors = None
            # NOTE(review): sensor lock is taken before the registry lock;
            # presumably this matches the ordering used when recording
            # through a sensor -- confirm against Sensor's implementation.
            with sensor._lock:
                with self._lock:
                    val = self._sensors.pop(name, None)
                    if val and val == sensor:
                        for metric in sensor.metrics:
                            self.remove_metric(metric.metric_name)
                        logger.debug('Removed sensor with name %s', name)
                        child_sensors = self._children_sensors.pop(sensor, None)
            # Recurse outside the locks to remove any child sensors.
            if child_sensors:
                for child_sensor in child_sensors:
                    self.remove_sensor(child_sensor.name)

    def add_metric(self, metric_name, measurable, config=None):
        """
        Add a metric to monitor an object that implements measurable.
        This metric won't be associated with any sensor.
        This is a way to expose existing values as metrics.

        Arguments:
            metric_name (MetricName): The name of the metric
            measurable (AbstractMeasurable): The measurable that will be
                measured by this metric
            config (MetricConfig, optional): The configuration to use when
                measuring this measurable
        """
        # NOTE there was a lock here, but i don't think it's needed
        metric = KafkaMetric(metric_name, measurable, config or self.config)
        self.register_metric(metric)

    def remove_metric(self, metric_name):
        """
        Remove a metric if it exists and return it. Return None otherwise.
        If a metric is removed, `metric_removal` will be invoked
        for each reporter.

        Arguments:
            metric_name (MetricName): The name of the metric

        Returns:
            KafkaMetric: the removed `KafkaMetric` or None if no such
                metric exists
        """
        with self._lock:
            metric = self._metrics.pop(metric_name, None)
            if metric:
                for reporter in self._reporters:
                    reporter.metric_removal(metric)
            return metric

    def add_reporter(self, reporter):
        """Add a MetricReporter"""
        with self._lock:
            # bring the new reporter up to date with all existing metrics
            reporter.init(list(self.metrics.values()))
            self._reporters.append(reporter)

    def register_metric(self, metric):
        # Publish a metric to the registry and notify every reporter.
        # Raises ValueError if a metric with the same name already exists.
        with self._lock:
            if metric.metric_name in self.metrics:
                raise ValueError('A metric named "%s" already exists, cannot'
                                 ' register another one.' % (metric.metric_name,))
            self.metrics[metric.metric_name] = metric
            for reporter in self._reporters:
                reporter.metric_change(metric)

    class ExpireSensorTask(object):
        """
        This iterates over every Sensor and triggers a remove_sensor
        if it has expired. Package private for testing
        """
        @staticmethod
        def run(metrics):
            # snapshot the items so removal during iteration is safe
            items = list(metrics._sensors.items())
            for name, sensor in items:
                # remove_sensor also locks the sensor object. This is fine
                # because synchronized is reentrant. There is however a minor
                # race condition here. Assume we have a parent sensor P and
                # child sensor C. Calling record on C would cause a record on
                # P as well. So expiration time for P == expiration time for C.
                # If the record on P happens via C just after P is removed,
                # that will cause C to also get removed. Since the expiration
                # time is typically high it is not expected to be a significant
                # concern and thus not necessary to optimize
                with sensor._lock:
                    if sensor.has_expired():
                        logger.debug('Removing expired sensor %s', name)
                        metrics.remove_sensor(name)

    def close(self):
        """Close this metrics repository (closes all reporters)."""
        for reporter in self._reporters:
            reporter.close()

        self._metrics.clear()
+ """ + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def init(self, metrics): + """ + This is called when the reporter is first registered + to initially register all existing metrics + + Arguments: + metrics (list of KafkaMetric): All currently existing metrics + """ + raise NotImplementedError + + @abc.abstractmethod + def metric_change(self, metric): + """ + This is called whenever a metric is updated or added + + Arguments: + metric (KafkaMetric) + """ + raise NotImplementedError + + @abc.abstractmethod + def metric_removal(self, metric): + """ + This is called whenever a metric is removed + + Arguments: + metric (KafkaMetric) + """ + raise NotImplementedError + + @abc.abstractmethod + def configure(self, configs): + """ + Configure this class with the given key-value pairs + + Arguments: + configs (dict of {str, ?}) + """ + raise NotImplementedError + + @abc.abstractmethod + def close(self): + """Called when the metrics repository is closed.""" + raise NotImplementedError diff --git a/testbed/dpkp__kafka-python/kafka/metrics/quota.py b/testbed/dpkp__kafka-python/kafka/metrics/quota.py new file mode 100644 index 0000000000000000000000000000000000000000..4d1b0d6cb065f20499e0036a22d85872d5d50831 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/quota.py @@ -0,0 +1,42 @@ +from __future__ import absolute_import + + +class Quota(object): + """An upper or lower bound for metrics""" + def __init__(self, bound, is_upper): + self._bound = bound + self._upper = is_upper + + @staticmethod + def upper_bound(upper_bound): + return Quota(upper_bound, True) + + @staticmethod + def lower_bound(lower_bound): + return Quota(lower_bound, False) + + def is_upper_bound(self): + return self._upper + + @property + def bound(self): + return self._bound + + def is_acceptable(self, value): + return ((self.is_upper_bound() and value <= self.bound) or + (not self.is_upper_bound() and value >= self.bound)) + + def __hash__(self): + prime = 31 + result = prime + 
class Quota(object):
    """An upper or lower bound for metrics"""

    def __init__(self, bound, is_upper):
        """
        Arguments:
            bound (float): the numeric bound
            is_upper (bool): True for an upper bound, False for a lower bound
        """
        self._bound = bound
        self._upper = is_upper

    @staticmethod
    def upper_bound(upper_bound):
        """Create a Quota that values must not exceed."""
        return Quota(upper_bound, True)

    @staticmethod
    def lower_bound(lower_bound):
        """Create a Quota that values must not fall below."""
        return Quota(lower_bound, False)

    def is_upper_bound(self):
        return self._upper

    @property
    def bound(self):
        return self._bound

    def is_acceptable(self, value):
        """Return True if value satisfies this bound."""
        return ((self.is_upper_bound() and value <= self.bound) or
                (not self.is_upper_bound() and value >= self.bound))

    def __hash__(self):
        # BUG FIX: the previous Java-style arithmetic hash
        # (prime * (prime + bound) + is_upper) returned a float whenever
        # bound was a float, and __hash__ returning a non-int raises
        # TypeError. Hash a tuple instead; stays consistent with __eq__.
        return hash((self._bound, self._upper))

    def __eq__(self, other):
        if self is other:
            return True
        return (type(self) == type(other) and
                self.bound == other.bound and
                self.is_upper_bound() == other.is_upper_bound())

    def __ne__(self, other):
        return not self.__eq__(other)
+ """ + def __init__(self): + super(Avg, self).__init__(0.0) + + def update(self, sample, config, value, now): + sample.value += value + + def combine(self, samples, config, now): + total_sum = 0 + total_count = 0 + for sample in samples: + total_sum += sample.value + total_count += sample.event_count + if not total_count: + return 0 + return float(total_sum) / total_count diff --git a/testbed/dpkp__kafka-python/kafka/metrics/stats/count.py b/testbed/dpkp__kafka-python/kafka/metrics/stats/count.py new file mode 100644 index 0000000000000000000000000000000000000000..6e0a2d5459cd2afd043fcb80e9f984a9971990af --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/stats/count.py @@ -0,0 +1,17 @@ +from __future__ import absolute_import + +from kafka.metrics.stats.sampled_stat import AbstractSampledStat + + +class Count(AbstractSampledStat): + """ + An AbstractSampledStat that maintains a simple count of what it has seen. + """ + def __init__(self): + super(Count, self).__init__(0.0) + + def update(self, sample, config, value, now): + sample.value += 1.0 + + def combine(self, samples, config, now): + return float(sum(sample.value for sample in samples)) diff --git a/testbed/dpkp__kafka-python/kafka/metrics/stats/histogram.py b/testbed/dpkp__kafka-python/kafka/metrics/stats/histogram.py new file mode 100644 index 0000000000000000000000000000000000000000..ecc6c9db4ddb89b05232ad046b5da21693d930ca --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/stats/histogram.py @@ -0,0 +1,95 @@ +from __future__ import absolute_import + +import math + + +class Histogram(object): + def __init__(self, bin_scheme): + self._hist = [0.0] * bin_scheme.bins + self._count = 0.0 + self._bin_scheme = bin_scheme + + def record(self, value): + self._hist[self._bin_scheme.to_bin(value)] += 1.0 + self._count += 1.0 + + def value(self, quantile): + if self._count == 0.0: + return float('NaN') + _sum = 0.0 + quant = float(quantile) + for i, value in enumerate(self._hist[:-1]): + _sum += 
class Histogram(object):
    """A fixed-bucket histogram supporting approximate quantile queries."""

    def __init__(self, bin_scheme):
        """
        Arguments:
            bin_scheme: a ConstantBinScheme or LinearBinScheme mapping
                values to bucket indices and back
        """
        self._hist = [0.0] * bin_scheme.bins
        self._count = 0.0
        self._bin_scheme = bin_scheme

    def record(self, value):
        """Add a single observation."""
        self._hist[self._bin_scheme.to_bin(value)] += 1.0
        self._count += 1.0

    def value(self, quantile):
        """Return the smallest bin lower-bound where the cumulative fraction
        of observations exceeds `quantile`.

        Returns NaN when the histogram is empty and +inf when the quantile
        is never exceeded before the overflow bucket.
        """
        if self._count == 0.0:
            return float('NaN')
        _sum = 0.0
        quant = float(quantile)
        for i, value in enumerate(self._hist[:-1]):
            _sum += value
            if _sum / self._count > quant:
                return self._bin_scheme.from_bin(i)
        return float('inf')

    @property
    def counts(self):
        # the live per-bucket counts (not a copy)
        return self._hist

    def clear(self):
        """Reset all bucket counts and the total count to zero."""
        # BUG FIX: was `for i in range(self._hist)` -- range() over a list
        # raises TypeError, so clear() could never have worked
        for i in range(len(self._hist)):
            self._hist[i] = 0.0
        self._count = 0

    def __str__(self):
        values = ['%.10f:%.0f' % (self._bin_scheme.from_bin(i), value) for
                  i, value in enumerate(self._hist[:-1])]
        values.append('%s:%s' % (float('inf'), self._hist[-1]))
        return '{%s}' % ','.join(values)

    class ConstantBinScheme(object):
        """Evenly-sized buckets over [min, max] plus -inf/+inf overflow bins."""

        def __init__(self, bins, min_val, max_val):
            if bins < 2:
                raise ValueError('Must have at least 2 bins.')
            self._min = float(min_val)
            self._max = float(max_val)
            self._bins = int(bins)
            # use the float-coerced endpoints so integer arguments cannot
            # trigger integer (floor) division on python 2
            self._bucket_width = (self._max - self._min) / (bins - 2)

        @property
        def bins(self):
            return self._bins

        def from_bin(self, b):
            # bucket 0 and the last bucket are under/overflow bins
            if b == 0:
                return float('-inf')
            elif b == self._bins - 1:
                return float('inf')
            else:
                return self._min + (b - 1) * self._bucket_width

        def to_bin(self, x):
            if x < self._min:
                return 0
            elif x > self._max:
                return self._bins - 1
            else:
                return int(((x - self._min) / self._bucket_width) + 1)

    class LinearBinScheme(object):
        """Buckets whose widths grow linearly, with a +inf overflow bin."""

        def __init__(self, num_bins, max_val):
            self._bins = num_bins
            self._max = max_val
            # triangular-number scaling; float literal avoids integer
            # (floor) division on python 2
            self._scale = float(max_val) / (num_bins * (num_bins - 1) / 2.0)

        @property
        def bins(self):
            return self._bins

        def from_bin(self, b):
            if b == self._bins - 1:
                return float('inf')
            else:
                unscaled = (b * (b + 1.0)) / 2.0
                return unscaled * self._scale

        def to_bin(self, x):
            if x < 0.0:
                raise ValueError('Values less than 0.0 not accepted.')
            elif x > self._max:
                return self._bins - 1
            else:
                scaled = x / self._scale
                # inverse of the triangular mapping in from_bin
                return int(-0.5 + math.sqrt(2.0 * scaled + 0.25))
class Max(AbstractSampledStat):
    """An AbstractSampledStat that gives the max over its samples."""

    def __init__(self):
        # -inf is the identity element for max
        super(Max, self).__init__(float('-inf'))

    def update(self, sample, config, value, now):
        if value > sample.value:
            sample.value = value

    def combine(self, samples, config, now):
        values = [sample.value for sample in samples]
        if not values:
            return float('-inf')
        return float(max(values))


class Min(AbstractSampledStat):
    """An AbstractSampledStat that gives the min over its samples."""

    def __init__(self):
        # sys.maxsize stands in for +inf as the identity element for min
        super(Min, self).__init__(float(sys.maxsize))

    def update(self, sample, config, value, now):
        if value < sample.value:
            sample.value = value

    def combine(self, samples, config, now):
        values = [sample.value for sample in samples]
        if not values:
            return float(sys.maxsize)
        return float(min(values))
class Percentile(object):
    """Associates a metric name with the percentile point it reports."""

    def __init__(self, metric_name, percentile):
        """
        Arguments:
            metric_name (MetricName): name the percentile is published under
            percentile (float): the percentile point, e.g. 99.0 for p99
        """
        self._metric_name = metric_name
        # normalize to float so integer arguments behave identically
        self._percentile = float(percentile)

    @property
    def name(self):
        """The MetricName for this percentile."""
        return self._metric_name

    @property
    def percentile(self):
        """The percentile point as a float."""
        return self._percentile
class BucketSizing(object):
    """Enumeration of supported histogram bucket layouts."""
    CONSTANT = 0
    LINEAR = 1


class Percentiles(AbstractSampledStat, AbstractCompoundStat):
    """A compound stat that reports one or more percentiles"""

    def __init__(self, size_in_bytes, bucketing, max_val, min_val=0.0,
                 percentiles=None):
        """
        Arguments:
            size_in_bytes (int): memory budget for each histogram; every
                bucket costs 4 bytes, so buckets = size_in_bytes // 4
            bucketing (int): BucketSizing.CONSTANT or BucketSizing.LINEAR
            max_val (float): maximum expected value
            min_val (float, optional): minimum expected value; must be 0.0
                when bucketing is LINEAR
            percentiles (list of Percentile, optional): percentile points
                to expose as metrics

        Raises:
            ValueError: for an unknown bucketing mode, or LINEAR with a
                non-zero min_val
        """
        super(Percentiles, self).__init__(0.0)
        self._percentiles = percentiles or []
        self._buckets = int(size_in_bytes / 4)
        if bucketing == BucketSizing.CONSTANT:
            self._bin_scheme = Histogram.ConstantBinScheme(self._buckets,
                                                           min_val, max_val)
        elif bucketing == BucketSizing.LINEAR:
            if min_val != 0.0:
                raise ValueError('Linear bucket sizing requires min_val'
                                 ' to be 0.0.')
            # BUG FIX: was assigned to `self.bin_scheme` (no underscore),
            # leaving the `self._bin_scheme` read by value()/new_sample()
            # undefined for linear bucketing
            self._bin_scheme = Histogram.LinearBinScheme(self._buckets,
                                                         max_val)
        else:
            # BUG FIX: the ValueError was constructed but never raised,
            # silently accepting unknown bucket types
            raise ValueError('Unknown bucket type: %s' % (bucketing,))

    def stats(self):
        """Return one NamedMeasurable per configured percentile point."""
        measurables = []

        def make_measure_fn(pct):
            # bind pct at definition time to avoid the late-binding
            # closure pitfall inside the loop below
            return lambda config, now: self.value(config, now,
                                                  pct / 100.0)

        for percentile in self._percentiles:
            measure_fn = make_measure_fn(percentile.percentile)
            stat = NamedMeasurable(percentile.name, AnonMeasurable(measure_fn))
            measurables.append(stat)
        return measurables

    def value(self, config, now, quantile):
        """Return the value at `quantile` across all retained samples.

        Returns NaN when no events have been recorded.
        """
        self.purge_obsolete_samples(config, now)
        count = sum(sample.event_count for sample in self._samples)
        if count == 0.0:
            return float('NaN')
        sum_val = 0.0
        quant = float(quantile)
        for b in range(self._buckets):
            for sample in self._samples:
                assert type(sample) is self.HistogramSample
                hist = sample.histogram.counts
                sum_val += hist[b]
                if sum_val / count > quant:
                    return self._bin_scheme.from_bin(b)
        return float('inf')

    def combine(self, samples, config, now):
        # the combined scalar value of a percentiles stat is its median
        return self.value(config, now, 0.5)

    def new_sample(self, time_ms):
        return Percentiles.HistogramSample(self._bin_scheme, time_ms)

    def update(self, sample, config, value, time_ms):
        assert type(sample) is self.HistogramSample
        sample.histogram.record(value)

    class HistogramSample(AbstractSampledStat.Sample):
        """Sample that keeps a full histogram instead of a single scalar."""
        def __init__(self, scheme, now):
            super(Percentiles.HistogramSample, self).__init__(0.0, now)
            self.histogram = Histogram(scheme)


class TimeUnit(object):
    """Symbolic constants for the time units a Rate can report in."""
    _names = {
        'nanosecond': 0,
        'microsecond': 1,
        'millisecond': 2,
        'second': 3,
        'minute': 4,
        'hour': 5,
        'day': 6,
    }

    NANOSECONDS = _names['nanosecond']
    MICROSECONDS = _names['microsecond']
    MILLISECONDS = _names['millisecond']
    SECONDS = _names['second']
    MINUTES = _names['minute']
    HOURS = _names['hour']
    DAYS = _names['day']

    # inverse mapping: unit constant -> unit name
    _unit_names = dict((index, name) for name, index in _names.items())

    @staticmethod
    def get_name(time_unit):
        """Return the lowercase name for a unit constant (e.g. SECONDS).

        BUG FIX: previously indexed `_names` (keyed by *name*) with a unit
        constant, so every call raised KeyError.
        """
        return TimeUnit._unit_names[time_unit]
class Rate(AbstractMeasurableStat):
    """
    The rate of the given quantity. By default this is the total observed
    over a set of samples from a sampled statistic divided by the elapsed
    time over the sample windows. Alternative AbstractSampledStat
    implementations can be provided, however, to record the rate of
    occurrences (e.g. the count of values measured over the time interval)
    or other such values.
    """

    def __init__(self, time_unit=TimeUnit.SECONDS, sampled_stat=None):
        self._stat = sampled_stat or SampledTotal()
        self._unit = time_unit

    def unit_name(self):
        """Name of the unit this rate is expressed in (e.g. 'second')."""
        return TimeUnit.get_name(self._unit)

    def record(self, config, value, time_ms):
        """Forward the observation to the underlying sampled stat."""
        self._stat.record(config, value, time_ms)

    def measure(self, config, now):
        """Combined sampled value divided by the unit-converted window size."""
        measured = self._stat.measure(config, now)
        elapsed = self.convert(self.window_size(config, now))
        return float(measured) / elapsed

    def window_size(self, config, now):
        # purge old samples before we compute the window size
        self._stat.purge_obsolete_samples(config, now)

        # Here we check the total amount of time elapsed since the oldest
        # non-obsolete window. This gives the total window_size of the batch
        # which is the time used for Rate computation. However, there is
        # an issue if we do not have sufficient data: e.g. if only 1 second
        # has elapsed in a 30 second window, the measured rate will be very
        # high. Hence we assume that the elapsed time is always N-1 complete
        # windows plus whatever fraction of the final window is complete.
        #
        # Note that we could simply count the amount of time elapsed in
        # the current window and add n-1 windows to get the total time,
        # but this approach does not account for sleeps. AbstractSampledStat
        # only creates samples whenever record is called; if no record is
        # called for a period of time, that time is not accounted for in
        # window_size and produces incorrect results.
        elapsed_ms = now - self._stat.oldest(now).last_window_ms
        # how many full windows of data are currently retained
        full_windows = int(elapsed_ms / config.time_window_ms)
        required_windows = config.samples - 1
        # if the available windows are less than the minimum required,
        # add the difference to the total elapsed time
        if full_windows < required_windows:
            elapsed_ms += ((required_windows - full_windows) *
                           config.time_window_ms)
        return elapsed_ms

    def convert(self, time_ms):
        """Convert a span of milliseconds into this rate's time unit."""
        conversions = {
            TimeUnit.NANOSECONDS: lambda ms: ms * 1000.0 * 1000.0,
            TimeUnit.MICROSECONDS: lambda ms: ms * 1000.0,
            TimeUnit.MILLISECONDS: lambda ms: ms,
            TimeUnit.SECONDS: lambda ms: ms / 1000.0,
            TimeUnit.MINUTES: lambda ms: ms / (60.0 * 1000.0),
            TimeUnit.HOURS: lambda ms: ms / (60.0 * 60.0 * 1000.0),
            TimeUnit.DAYS: lambda ms: ms / (24.0 * 60.0 * 60.0 * 1000.0),
        }
        try:
            return conversions[self._unit](time_ms)
        except KeyError:
            raise ValueError('Unknown unit: %s' % (self._unit,))


class SampledTotal(AbstractSampledStat):
    """Sampled stat whose combined value is the plain sum of all samples."""

    def __init__(self, initial_value=None):
        # initial_value is accepted only for signature compatibility;
        # a running total must always start at 0.0
        if initial_value is not None:
            raise ValueError('initial_value cannot be set on SampledTotal')
        super(SampledTotal, self).__init__(0.0)

    def update(self, sample, config, value, time_ms):
        sample.value += value

    def combine(self, samples, config, now):
        total = 0.0
        for sample in samples:
            total += sample.value
        return total
AbstractMeasurableStat + + +class AbstractSampledStat(AbstractMeasurableStat): + """ + An AbstractSampledStat records a single scalar value measured over + one or more samples. Each sample is recorded over a configurable + window. The window can be defined by number of events or elapsed + time (or both, if both are given the window is complete when + *either* the event count or elapsed time criterion is met). + + All the samples are combined to produce the measurement. When a + window is complete the oldest sample is cleared and recycled to + begin recording the next sample. + + Subclasses of this class define different statistics measured + using this basic pattern. + """ + __metaclass__ = abc.ABCMeta + + def __init__(self, initial_value): + self._initial_value = initial_value + self._samples = [] + self._current = 0 + + @abc.abstractmethod + def update(self, sample, config, value, time_ms): + raise NotImplementedError + + @abc.abstractmethod + def combine(self, samples, config, now): + raise NotImplementedError + + def record(self, config, value, time_ms): + sample = self.current(time_ms) + if sample.is_complete(time_ms, config): + sample = self._advance(config, time_ms) + self.update(sample, config, float(value), time_ms) + sample.event_count += 1 + + def new_sample(self, time_ms): + return self.Sample(self._initial_value, time_ms) + + def measure(self, config, now): + self.purge_obsolete_samples(config, now) + return float(self.combine(self._samples, config, now)) + + def current(self, time_ms): + if not self._samples: + self._samples.append(self.new_sample(time_ms)) + return self._samples[self._current] + + def oldest(self, now): + if not self._samples: + self._samples.append(self.new_sample(now)) + oldest = self._samples[0] + for sample in self._samples[1:]: + if sample.last_window_ms < oldest.last_window_ms: + oldest = sample + return oldest + + def purge_obsolete_samples(self, config, now): + """ + Timeout any windows that have expired in the absence of any 
events + """ + expire_age = config.samples * config.time_window_ms + for sample in self._samples: + if now - sample.last_window_ms >= expire_age: + sample.reset(now) + + def _advance(self, config, time_ms): + self._current = (self._current + 1) % config.samples + if self._current >= len(self._samples): + sample = self.new_sample(time_ms) + self._samples.append(sample) + return sample + else: + sample = self.current(time_ms) + sample.reset(time_ms) + return sample + + class Sample(object): + + def __init__(self, initial_value, now): + self.initial_value = initial_value + self.event_count = 0 + self.last_window_ms = now + self.value = initial_value + + def reset(self, now): + self.event_count = 0 + self.last_window_ms = now + self.value = self.initial_value + + def is_complete(self, time_ms, config): + return (time_ms - self.last_window_ms >= config.time_window_ms or + self.event_count >= config.event_window) diff --git a/testbed/dpkp__kafka-python/kafka/metrics/stats/total.py b/testbed/dpkp__kafka-python/kafka/metrics/stats/total.py new file mode 100644 index 0000000000000000000000000000000000000000..5b3bb87fd19a835462e2469a5873d483f3a19d84 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/metrics/stats/total.py @@ -0,0 +1,15 @@ +from __future__ import absolute_import + +from kafka.metrics.measurable_stat import AbstractMeasurableStat + + +class Total(AbstractMeasurableStat): + """An un-windowed cumulative total maintained over all time.""" + def __init__(self, value=0.0): + self._total = value + + def record(self, config, value, now): + self._total += value + + def measure(self, config, now): + return float(self._total) diff --git a/testbed/dpkp__kafka-python/kafka/partitioner/__init__.py b/testbed/dpkp__kafka-python/kafka/partitioner/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..21a3bbb66622ee5c6880fdea6dea9c95905cba5c --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/partitioner/__init__.py @@ -0,0 +1,8 @@ +from __future__ 
import absolute_import + +from kafka.partitioner.default import DefaultPartitioner, murmur2 + + +__all__ = [ + 'DefaultPartitioner', 'murmur2' +] diff --git a/testbed/dpkp__kafka-python/kafka/producer/buffer.py b/testbed/dpkp__kafka-python/kafka/producer/buffer.py new file mode 100644 index 0000000000000000000000000000000000000000..1008017004c2cdc560472ac8db2e1a84ea167dcc --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/producer/buffer.py @@ -0,0 +1,115 @@ +from __future__ import absolute_import, division + +import collections +import io +import threading +import time + +from kafka.metrics.stats import Rate + +import kafka.errors as Errors + + +class SimpleBufferPool(object): + """A simple pool of BytesIO objects with a weak memory ceiling.""" + def __init__(self, memory, poolable_size, metrics=None, metric_group_prefix='producer-metrics'): + """Create a new buffer pool. + + Arguments: + memory (int): maximum memory that this buffer pool can allocate + poolable_size (int): memory size per buffer to cache in the free + list rather than deallocating + """ + self._poolable_size = poolable_size + self._lock = threading.RLock() + + buffers = int(memory / poolable_size) if poolable_size else 0 + self._free = collections.deque([io.BytesIO() for _ in range(buffers)]) + + self._waiters = collections.deque() + self.wait_time = None + if metrics: + self.wait_time = metrics.sensor('bufferpool-wait-time') + self.wait_time.add(metrics.metric_name( + 'bufferpool-wait-ratio', metric_group_prefix, + 'The fraction of time an appender waits for space allocation.'), + Rate()) + + def allocate(self, size, max_time_to_block_ms): + """ + Allocate a buffer of the given size. This method blocks if there is not + enough memory and the buffer pool is configured with blocking mode. 
+ + Arguments: + size (int): The buffer size to allocate in bytes [ignored] + max_time_to_block_ms (int): The maximum time in milliseconds to + block for buffer memory to be available + + Returns: + io.BytesIO + """ + with self._lock: + # check if we have a free buffer of the right size pooled + if self._free: + return self._free.popleft() + + elif self._poolable_size == 0: + return io.BytesIO() + + else: + # we are out of buffers and will have to block + buf = None + more_memory = threading.Condition(self._lock) + self._waiters.append(more_memory) + # loop over and over until we have a buffer or have reserved + # enough memory to allocate one + while buf is None: + start_wait = time.time() + more_memory.wait(max_time_to_block_ms / 1000.0) + end_wait = time.time() + if self.wait_time: + self.wait_time.record(end_wait - start_wait) + + if self._free: + buf = self._free.popleft() + else: + self._waiters.remove(more_memory) + raise Errors.KafkaTimeoutError( + "Failed to allocate memory within the configured" + " max blocking time") + + # remove the condition for this thread to let the next thread + # in line start getting memory + removed = self._waiters.popleft() + assert removed is more_memory, 'Wrong condition' + + # signal any additional waiters if there is more memory left + # over for them + if self._free and self._waiters: + self._waiters[0].notify() + + # unlock and return the buffer + return buf + + def deallocate(self, buf): + """ + Return buffers to the pool. If they are of the poolable size add them + to the free list, otherwise just mark the memory as free. + + Arguments: + buffer_ (io.BytesIO): The buffer to return + """ + with self._lock: + # BytesIO.truncate here makes the pool somewhat pointless + # but we stick with the BufferPool API until migrating to + # bytesarray / memoryview. The buffer we return must not + # expose any prior data on read(). 
+ buf.truncate(0) + self._free.append(buf) + if self._waiters: + self._waiters[0].notify() + + def queued(self): + """The number of threads blocked waiting on memory.""" + with self._lock: + return len(self._waiters) diff --git a/testbed/dpkp__kafka-python/kafka/protocol/group.py b/testbed/dpkp__kafka-python/kafka/protocol/group.py new file mode 100644 index 0000000000000000000000000000000000000000..bcb96553b32c7fe960a11f1912091e76873723b2 --- /dev/null +++ b/testbed/dpkp__kafka-python/kafka/protocol/group.py @@ -0,0 +1,230 @@ +from __future__ import absolute_import + +from kafka.protocol.api import Request, Response +from kafka.protocol.struct import Struct +from kafka.protocol.types import Array, Bytes, Int16, Int32, Schema, String + + +class JoinGroupResponse_v0(Response): + API_KEY = 11 + API_VERSION = 0 + SCHEMA = Schema( + ('error_code', Int16), + ('generation_id', Int32), + ('group_protocol', String('utf-8')), + ('leader_id', String('utf-8')), + ('member_id', String('utf-8')), + ('members', Array( + ('member_id', String('utf-8')), + ('member_metadata', Bytes))) + ) + + +class JoinGroupResponse_v1(Response): + API_KEY = 11 + API_VERSION = 1 + SCHEMA = JoinGroupResponse_v0.SCHEMA + + +class JoinGroupResponse_v2(Response): + API_KEY = 11 + API_VERSION = 2 + SCHEMA = Schema( + ('throttle_time_ms', Int32), + ('error_code', Int16), + ('generation_id', Int32), + ('group_protocol', String('utf-8')), + ('leader_id', String('utf-8')), + ('member_id', String('utf-8')), + ('members', Array( + ('member_id', String('utf-8')), + ('member_metadata', Bytes))) + ) + + +class JoinGroupRequest_v0(Request): + API_KEY = 11 + API_VERSION = 0 + RESPONSE_TYPE = JoinGroupResponse_v0 + SCHEMA = Schema( + ('group', String('utf-8')), + ('session_timeout', Int32), + ('member_id', String('utf-8')), + ('protocol_type', String('utf-8')), + ('group_protocols', Array( + ('protocol_name', String('utf-8')), + ('protocol_metadata', Bytes))) + ) + UNKNOWN_MEMBER_ID = '' + + +class 
JoinGroupRequest_v1(Request): + API_KEY = 11 + API_VERSION = 1 + RESPONSE_TYPE = JoinGroupResponse_v1 + SCHEMA = Schema( + ('group', String('utf-8')), + ('session_timeout', Int32), + ('rebalance_timeout', Int32), + ('member_id', String('utf-8')), + ('protocol_type', String('utf-8')), + ('group_protocols', Array( + ('protocol_name', String('utf-8')), + ('protocol_metadata', Bytes))) + ) + UNKNOWN_MEMBER_ID = '' + + +class JoinGroupRequest_v2(Request): + API_KEY = 11 + API_VERSION = 2 + RESPONSE_TYPE = JoinGroupResponse_v2 + SCHEMA = JoinGroupRequest_v1.SCHEMA + UNKNOWN_MEMBER_ID = '' + + +JoinGroupRequest = [ + JoinGroupRequest_v0, JoinGroupRequest_v1, JoinGroupRequest_v2 +] +JoinGroupResponse = [ + JoinGroupResponse_v0, JoinGroupResponse_v1, JoinGroupResponse_v2 +] + + +class ProtocolMetadata(Struct): + SCHEMA = Schema( + ('version', Int16), + ('subscription', Array(String('utf-8'))), # topics list + ('user_data', Bytes) + ) + + +class SyncGroupResponse_v0(Response): + API_KEY = 14 + API_VERSION = 0 + SCHEMA = Schema( + ('error_code', Int16), + ('member_assignment', Bytes) + ) + + +class SyncGroupResponse_v1(Response): + API_KEY = 14 + API_VERSION = 1 + SCHEMA = Schema( + ('throttle_time_ms', Int32), + ('error_code', Int16), + ('member_assignment', Bytes) + ) + + +class SyncGroupRequest_v0(Request): + API_KEY = 14 + API_VERSION = 0 + RESPONSE_TYPE = SyncGroupResponse_v0 + SCHEMA = Schema( + ('group', String('utf-8')), + ('generation_id', Int32), + ('member_id', String('utf-8')), + ('group_assignment', Array( + ('member_id', String('utf-8')), + ('member_metadata', Bytes))) + ) + + +class SyncGroupRequest_v1(Request): + API_KEY = 14 + API_VERSION = 1 + RESPONSE_TYPE = SyncGroupResponse_v1 + SCHEMA = SyncGroupRequest_v0.SCHEMA + + +SyncGroupRequest = [SyncGroupRequest_v0, SyncGroupRequest_v1] +SyncGroupResponse = [SyncGroupResponse_v0, SyncGroupResponse_v1] + + +class MemberAssignment(Struct): + SCHEMA = Schema( + ('version', Int16), + ('assignment', Array( + 
('topic', String('utf-8')), + ('partitions', Array(Int32)))), + ('user_data', Bytes) + ) + + +class HeartbeatResponse_v0(Response): + API_KEY = 12 + API_VERSION = 0 + SCHEMA = Schema( + ('error_code', Int16) + ) + + +class HeartbeatResponse_v1(Response): + API_KEY = 12 + API_VERSION = 1 + SCHEMA = Schema( + ('throttle_time_ms', Int32), + ('error_code', Int16) + ) + + +class HeartbeatRequest_v0(Request): + API_KEY = 12 + API_VERSION = 0 + RESPONSE_TYPE = HeartbeatResponse_v0 + SCHEMA = Schema( + ('group', String('utf-8')), + ('generation_id', Int32), + ('member_id', String('utf-8')) + ) + + +class HeartbeatRequest_v1(Request): + API_KEY = 12 + API_VERSION = 1 + RESPONSE_TYPE = HeartbeatResponse_v1 + SCHEMA = HeartbeatRequest_v0.SCHEMA + + +HeartbeatRequest = [HeartbeatRequest_v0, HeartbeatRequest_v1] +HeartbeatResponse = [HeartbeatResponse_v0, HeartbeatResponse_v1] + + +class LeaveGroupResponse_v0(Response): + API_KEY = 13 + API_VERSION = 0 + SCHEMA = Schema( + ('error_code', Int16) + ) + + +class LeaveGroupResponse_v1(Response): + API_KEY = 13 + API_VERSION = 1 + SCHEMA = Schema( + ('throttle_time_ms', Int32), + ('error_code', Int16) + ) + + +class LeaveGroupRequest_v0(Request): + API_KEY = 13 + API_VERSION = 0 + RESPONSE_TYPE = LeaveGroupResponse_v0 + SCHEMA = Schema( + ('group', String('utf-8')), + ('member_id', String('utf-8')) + ) + + +class LeaveGroupRequest_v1(Request): + API_KEY = 13 + API_VERSION = 1 + RESPONSE_TYPE = LeaveGroupResponse_v1 + SCHEMA = LeaveGroupRequest_v0.SCHEMA + + +LeaveGroupRequest = [LeaveGroupRequest_v0, LeaveGroupRequest_v1] +LeaveGroupResponse = [LeaveGroupResponse_v0, LeaveGroupResponse_v1]
    + +
    +
    + + + + + +
    + + + + +
    + + +
    + + + + + + + + + + + +
    + + +
    + + + + + + + + + +
    +
    + +
    +
    + +
    +
    +
    + + + +
      + +
    • + Pull Request +
    • + +
    • +
      + +
      + + + + Watch + + + +
      +
      +
      + Notification status + +
      + +
      + +
      + +
      + +

      Not watching

      + You only receive notifications for discussions in which you participate or are @mentioned. + + + Watch + +
      +
      + +
      + +
      + +

      Watching

      + You receive notifications for all discussions in this repository. + + + Unwatch + +
      +
      + +
      + +
      + +

      Ignoring

      + You do not receive any notifications for discussions in this repository. + + + Stop ignoring + +
      +
      + +
      + +
      +
      +
      + +
      +
    • + +
    • + + + Unstar + + + + Star + + +
    • + +
    • + + + Fork + + +
    • + + +
    + +

    + public + + + / + django +

    +
    + + + + +
    + + + + + + +
    + + +
    + + + tree: + d7504a3d7b + + +
    + +
    +
    + Switch branches/tags + +
    + +
    +
    + +
    +
    + +
    +
    + + + + +
    +
    + +
    + + 1.5c2 +
    +
    + + 1.5c1 +
    +
    + + 1.5b2 +
    +
    + + 1.5b1 +
    +
    + + 1.5a1 +
    +
    + + 1.5.1 +
    +
    + + 1.5 +
    +
    + + 1.4.5 +
    +
    + + 1.4.4 +
    +
    + + 1.4.3 +
    +
    + + 1.4.2 +
    +
    + + 1.4.1 +
    +
    + + 1.4 +
    +
    + + 1.3.7 +
    +
    + + 1.3.6 +
    +
    + + 1.3.5 +
    +
    + + 1.3.4 +
    +
    + + 1.3.3 +
    +
    + + 1.3.2 +
    +
    + + 1.3.1 +
    +
    + + 1.3 +
    +
    + + 1.2.7 +
    +
    + + 1.2.6 +
    +
    + + 1.2.5 +
    +
    + + 1.2.4 +
    +
    + + 1.2.3 +
    +
    + + 1.2.2 +
    +
    + + 1.2.1 +
    +
    + + 1.2 +
    +
    + + 1.1.4 +
    +
    + + 1.1.3 +
    +
    + + 1.1.2 +
    +
    + + 1.1.1 +
    +
    + + 1.1 +
    +
    + + 1.0.4 +
    +
    + + 1.0.3 +
    +
    + + 1.0.2 +
    +
    + + 1.0.1 +
    +
    + + 1.0 +
    +
    + +
    Nothing to show
    + +
    + +
    +
    +
    + +
    + + + +
    + + + + + + + +
    +
    + +
    + + + +
    + Browse code + +

    + Improved regex in strip_tags +

    + +
    Thanks Pablo Recio for the report. Refs #19237.
    + +
    + commit d7504a3d7b8645bdb979bab7ded0e9a9b6dccd0e + + + 1 parent + + afa3e16 + + +
    + + + authored + +
    +
    +
    + + + +
    +

    + Showing 2 changed files + with 2 additions + and 1 deletion. + Show Diff Stats + Hide Diff Stats

    + +
      +
    1. + + + 2 +  + + + django/utils/html.py +
    2. +
    3. + + + 1 +  + + + tests/regressiontests/utils/html.py +
    4. +
    +
    + + +
    +
    +
    +
    + 2  + + + django/utils/html.py + + +
    +
    + + + + +
    +
    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + ... + + ... + + + @@ -33,7 +33,7 @@ +
    + 33 + + 33 + + +  html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE) +
    + 34 + + 34 + + +  hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join([re.escape(x) for x in DOTS]), re.DOTALL) +
    + 35 + + 35 + + +  trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z') +
    + 36 + +   + + + -strip_tags_re = re.compile(r'</?\S([^=]*=(\s*"[^"]*"|\s*\'[^\']*\'|\S*)|[^>])*?>', re.IGNORECASE) +
    +   + + 36 + + + +strip_tags_re = re.compile(r'</?\S([^=>]*=(\s*"[^"]*"|\s*\'[^\']*\'|\S*)|[^>])*?>', re.IGNORECASE) +
    + 37 + + 37 + + +   +
    + 38 + + 38 + + +   +
    + 39 + + 39 + + +  def escape(text): +
    +
    + +
    +
    +
    +
    +
    + 1  + + + tests/regressiontests/utils/html.py + + +
    +
    + + + + +
    +
    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + ... + + ... + + + @@ -68,6 +68,7 @@ def test_strip_tags(self): +
    + 68 + + 68 + + +              ('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'), +
    + 69 + + 69 + + +              ('a<p a >b</p>c', 'abc'), +
    + 70 + + 70 + + +              ('d<a:b c:d>e</p>f', 'def'), +
    +   + + 71 + + + +            ('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'), +
    + 71 + + 72 + + +          ) +
    + 72 + + 73 + + +          for value, output in items: +
    + 73 + + 74 + + +              self.check_output(f, value, output) +
    +
    + +
    +
    +
    + + +
    + + +

    + 0 notes + on commit d7504a3 + +

    +
    +
    + +
    + + +
    +
    +
    + + + +
    +
    + + + Comments are parsed with GitHub Flavored Markdown + +
    + + +
    + + + + + + + + +

    + + Attach images by dragging & dropping them or + + choose an image + + + Octocat-spinner-32 Uploading your images now… + + + Unfortunately we don't support that file type yet. Try image files less than 5MB. + + + This browser doesn't support image attachments. + + + Something went really wrong and we can't process that image. + +

    + +
    +
    + +
    + + + +
    +
    +
    +

    Nothing to preview

    +
    +
    +
    +
    + +
    + +
    +
    +
    +
    +
    + +
    +
    + +
    +
    + Commit_comment_tip +

    Tip: You can also add notes to lines in a file. Hover to the left of a line to make a note

    +
    + +
    +
    + + + +
    + + + +
    + + +
    + +
    + + + + + + + Watch thread + + + +
    +
    +
    + Thread notifications + +
    + +
    + +
    + +
    + +

    Not watching

    + You only receive notifications for this thread if you participate or are @mentioned. + + + Watch thread + +
    +
    + +
    + +
    + +

    Watching

    + Receive all notifications for this thread. + + + Unwatch thread + +
    +
    + +
    + +
    + +

    Ignoring

    + You do not receive notifications for this thread. + + + Stop ignoring thread + +
    +
    + +
    +
    +
    +
    + +

    You only receive notifications for this thread when you participate or are @mentioned.

    +
    + + + + + + + +
    +
    +
    +
    + + +