From 02b9a815805dee5690174c1d89fb27860c97a0b3 Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Mon, 26 Feb 2024 08:55:51 -0800 Subject: [PATCH 01/23] chore: update release pipeline requirements (#36) --- .kokoro/requirements.txt | 618 ++++++++++++--------------------------- 1 file changed, 189 insertions(+), 429 deletions(-) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8c11c9f..79a4694 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -1,271 +1,144 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f - # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +cachetools==4.2.4 \ + --hash=sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693 \ + --hash=sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - 
--hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + 
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + 
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests + # via requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 - # via - # gcp-docuploader - # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - 
--hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 + # via gcp-releasetool +cryptography==41.0.6 \ + --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 - # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b - # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - 
--hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==1.17.0 \ + --hash=sha256:1a759f4b0906f4ea9dc7db3649aa11a632c72f6dc6a54f10cf57c1925d034a1c \ + --hash=sha256:f23db51d85484998af5549181be726f177bf90b481de238fe7a99ec970266b6b # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 - # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 - # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - 
--hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - 
--hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b - # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b - # via google-api-core +google-auth==2.15.0 \ + --hash=sha256:6897b93556d8d807ad70701bb89f000183aea366ca7ed94680828b37437a4994 \ + --hash=sha256:72f12a6cfc968d754d7bdab369c5c5c16032106e52d32c6dfd8484e4c01a6d1f + # via gcp-releasetool idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==4.13.0 \ + --hash=sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116 \ + --hash=sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d + # via keyring +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via - # -r requirements.in # keyring - # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 - # via keyring + # keyrings-alt jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 @@ -276,122 +149,64 @@ jinja2==3.1.3 \ --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - 
--hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - 
--hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 + # via gcp-releasetool +keyrings-alt==4.2.0 \ + --hash=sha256:2ba3d56441ba0637f5f9c096068f67010ac0453f9d0b626de2aa3019353b6431 \ + --hash=sha256:3d25912ed71d6deec85d7e6e867963e1357cd56186a41c9295b86939a5ebf85c + # via -r requirements.in +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + 
--hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 - # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f - # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 - # via - # 
gcp-releasetool - # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 - # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e - # via virtualenv +packaging==23.0 \ + --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \ + --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97 + # via gcp-releasetool protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ @@ -415,34 +230,24 @@ protobuf==3.20.3 \ --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # via gcp-releasetool +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 - # via - # readme-renderer - # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 @@ -451,31 +256,10 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 - # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via - # gcp-releasetool - # google-api-core 
- # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef - # via twine + # via gcp-releasetool rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 @@ -488,37 +272,13 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # gcp-docuploader + # google-auth # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 - # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef - # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e - # via - # requests - # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 - # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 - # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 + # via requests +zipp==3.11.0 \ + --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \ + --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766 # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a - # via -r requirements.in From ca7234e2e7581d04b3a058632471b1ddd9486f86 Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Mon, 26 Feb 2024 09:35:55 -0800 Subject: [PATCH 02/23] chore: add build module to release pipeline (#37) --- .kokoro/requirements.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 79a4694..868df30 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -282,3 +282,6 @@ zipp==3.11.0 \ 
--hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \ --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766 # via importlib-metadata +build==1.0.3 \ + --hash=sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b \ + --hash=sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f From a7816e8a707a9c6bd4c37beb9906e1a9b107f2b7 Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Mon, 26 Feb 2024 09:47:20 -0800 Subject: [PATCH 03/23] chore: regenerate kokoro deps (#38) --- .kokoro/requirements.txt | 279 ++++++++++++++++++++++++++++++++++----- 1 file changed, 247 insertions(+), 32 deletions(-) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 868df30..ef8e2ec 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,10 +4,18 @@ # # pip-compile --generate-hashes requirements.in # +argcomplete==3.2.2 \ + --hash=sha256:e44f4e7985883ab3e73a103ef0acd27299dbfe2dfed00142c35d4ddd3005901d \ + --hash=sha256:f3e49e8ea59b4026ee29548e24488af46e30c9de57d48638e24f54a1ea1000a2 + # via nox attrs==22.1.0 \ --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c # via gcp-releasetool +build==1.0.3 \ + --hash=sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b \ + --hash=sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f + # via -r requirements.in cachetools==4.2.4 \ --hash=sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693 \ --hash=sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1 @@ -85,11 +93,22 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via gcp-releasetool + # via + # -r requirements.in + # gcp-docuploader + # gcp-releasetool +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via + # gcp-docuploader + # nox cryptography==41.0.6 \ --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ @@ -114,17 +133,127 @@ cryptography==41.0.6 \ --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae - # via - # gcp-releasetool - # secretstorage + # via gcp-releasetool +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b + # via readme-renderer +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + 
--hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in gcp-releasetool==1.17.0 \ --hash=sha256:1a759f4b0906f4ea9dc7db3649aa11a632c72f6dc6a54f10cf57c1925d034a1c \ --hash=sha256:f23db51d85484998af5549181be726f177bf90b481de238fe7a99ec970266b6b # via -r requirements.in +google-api-core==2.17.1 \ + --hash=sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e \ + --hash=sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95 + # via + # google-cloud-core + # google-cloud-storage google-auth==2.15.0 \ --hash=sha256:6897b93556d8d807ad70701bb89f000183aea366ca7ed94680828b37437a4994 \ --hash=sha256:72f12a6cfc968d754d7bdab369c5c5c16032106e52d32c6dfd8484e4c01a6d1f - # via gcp-releasetool + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.11.0 \ + --hash=sha256:6fbf62659b83c8f3a0a743af0d661d2046c97c3a5bfb587c4662c4bc68de3e31 \ + --hash=sha256:88cbd7fb3d701c780c4272bc26952db99f25eb283fb4c2208423249f00b5fe53 + # via gcp-docuploader +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + 
--hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + 
--hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 + # via google-resumable-media +google-resumable-media==2.7.0 \ + --hash=sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b \ + --hash=sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08 + # via google-cloud-storage +googleapis-common-protos==1.62.0 \ + --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ + --hash=sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277 + # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 @@ -132,19 +261,14 @@ idna==3.4 \ importlib-metadata==4.13.0 \ --hash=sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116 \ --hash=sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d - # via keyring + # via + # -r requirements.in + # keyring + # twine jaraco-classes==3.2.3 \ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a - # via - # keyring - # keyrings-alt -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage + # via keyring jinja2==3.1.3 \ --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 @@ -152,11 +276,13 @@ jinja2==3.1.3 \ keyring==23.11.0 \ --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 - # via gcp-releasetool -keyrings-alt==4.2.0 \ - --hash=sha256:2ba3d56441ba0637f5f9c096068f67010ac0453f9d0b626de2aa3019353b6431 \ - --hash=sha256:3d25912ed71d6deec85d7e6e867963e1357cd56186a41c9295b86939a5ebf85c - # via -r requirements.in + # via + # gcp-releasetool + # twine +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich markupsafe==2.1.1 \ --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ @@ -199,14 +325,51 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes +nh3==0.2.15 \ + --hash=sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770 \ + --hash=sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf \ + --hash=sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305 \ + 
--hash=sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601 \ + --hash=sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28 \ + --hash=sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7 \ + --hash=sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3 \ + --hash=sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911 \ + --hash=sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf \ + --hash=sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0 \ + --hash=sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5 \ + --hash=sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97 \ + --hash=sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d \ + --hash=sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e \ + --hash=sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3 \ + --hash=sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f + # via -r requirements.in packaging==23.0 \ --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \ --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97 - # via gcp-releasetool + # via + # build + # gcp-releasetool + # nox +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 + # via twine +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ @@ -230,7 +393,11 @@ protobuf==3.20.3 \ --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee - # via gcp-releasetool + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba @@ -245,6 +412,12 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi +pygments==2.17.2 \ + --hash=sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c \ + --hash=sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367 + # via + # readme-renderer + # rich pyjwt==2.6.0 \ --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 @@ -252,36 +425,78 @@ pyjwt==2.6.0 \ pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool +pyproject-hooks==1.0.0 \ + 
--hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ + --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 + # via build python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool +readme-renderer==43.0 \ + --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 + # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via gcp-releasetool + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==13.7.0 \ + --hash=sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa \ + --hash=sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235 + # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via + # gcp-docuploader # google-auth # python-dateutil +twine==5.0.0 \ + --hash=sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4 \ + --hash=sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0 + # via -r requirements.in +typing-extensions==4.10.0 \ + --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \ + --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb + # via -r requirements.in urllib3==1.26.18 \ --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 - # via requests + # via + # requests + # twine +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox +wheel==0.42.0 \ + --hash=sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d \ + --hash=sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8 + # via -r requirements.in zipp==3.11.0 \ --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \ --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766 # via importlib-metadata -build==1.0.3 \ - --hash=sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b \ - 
--hash=sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes and the requirement is not +# satisfied by a package already installed. Consider using the --allow-unsafe flag. +# setuptools From 1a3457bd92dfe2bdc62dc0f3f48473f49ddacae9 Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Mon, 26 Feb 2024 09:56:25 -0800 Subject: [PATCH 04/23] chore: add deps to kokoro release jobs (#39) --- .kokoro/requirements.in | 3 ++- .kokoro/requirements.txt | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 58b015f..fc110a4 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -8,4 +8,5 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 -build \ No newline at end of file +build +tomli \ No newline at end of file diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index ef8e2ec..dbcedba 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -469,6 +469,10 @@ six==1.16.0 \ # gcp-docuploader # google-auth # python-dateutil +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via -r requirements.in twine==5.0.0 \ --hash=sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4 \ --hash=sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0 From 8da600106bbc3e88540a4d467ed6870a01eac954 Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Mon, 26 Feb 2024 10:25:06 -0800 Subject: [PATCH 05/23] chore: add deps to kokoro release jobs (#40) --- .kokoro/requirements.in | 3 ++- .kokoro/requirements.txt | 12 +++++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index fc110a4..ea07d57 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -9,4 +9,5 @@ nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 build -tomli \ No newline at end of file +tomli +secretstorage \ No newline at end of file diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index dbcedba..5da0d2a 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -133,7 +133,9 @@ cryptography==41.0.6 \ --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae - # via gcp-releasetool + # via + # gcp-releasetool + # secretstorage distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 @@ -269,6 +271,10 @@ jaraco-classes==3.2.3 \ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via secretstorage jinja2==3.1.3 \ --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ 
--hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 @@ -462,6 +468,10 @@ rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via -r requirements.in six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 From b1a98a5cdad91f2000fb67a2fe30562fa72717e4 Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Tue, 5 Mar 2024 13:07:23 -0800 Subject: [PATCH 06/23] chore: Update header-checker-lint.yml (#42) --- .github/header-checker-lint.yml | 36 +++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml index 6fe78aa..1743b48 100644 --- a/.github/header-checker-lint.yml +++ b/.github/header-checker-lint.yml @@ -1,15 +1,21 @@ -{"allowedCopyrightHolders": ["Google LLC"], - "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], - "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", - "py", - "html", - "txt" - ] -} \ No newline at end of file +allowedCopyrightHolders: + - "Google LLC" +allowedLicenses: + - "Apache-2.0" +sourceFileExtensions: + - "yaml" + - "yml" + - "sh" + - "proto" + - "Dockerfile" + - "py" + - "html" + - "text" +ignoreFiles: + - ".github/release-please.yml" + - ".github/release-trigger.yml" + - ".github/header-checker-lint.yml" + - ".github/sync-repo-settings.yaml" + - ".kokoro/**" + - "**/requirements.txt" + - "**/requirements-test.txt" From e5ab9372a5773cd9ad79808af4d24de171b976a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Mar 2024 14:39:00 -0800 Subject: [PATCH 07/23] chore(deps): bump cryptography from 42.0.0 to 42.0.4 in /.kokoro (#30) Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.0 to 42.0.4. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.0...42.0.4) --- updated-dependencies: - dependency-name: cryptography dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Averi Kitsch --- .kokoro/requirements.txt | 65 ++++++++++++++++++++++++---------------- 1 file changed, 39 insertions(+), 26 deletions(-) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 5da0d2a..251a6e4 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -109,30 +109,39 @@ colorlog==6.8.2 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + 
--hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage @@ -274,7 +283,9 @@ jaraco-classes==3.2.3 \ jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via secretstorage + # via + # keyring + # secretstorage jinja2==3.1.3 \ --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 @@ -471,7 +482,9 @@ rsa==4.9 \ secretstorage==3.3.3 \ --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via -r requirements.in + # via + # -r requirements.in + # keyring six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 From b29ae68bf68f3508bf1c7c81825fd146aefd6dd3 Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Tue, 5 Mar 2024 15:00:09 -0800 Subject: [PATCH 08/23] chore(ci): add fallback lint (#43) * chore(ci): add fallback lint * Update lint_fallback.yml --- .github/workflows/lint.yml | 12 ++++++++++-- .github/workflows/lint_fallback.yml | 30 +++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/lint_fallback.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 6e1f4c0..2352d4b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -15,12 +15,16 @@ name: Lint on: pull_request: - paths-ignore: + paths-ignore: # Changes to the paths list need to be reflected in lint_fallback.yml - "*.md" + - ".kokoro/**" + - ".github/**" pull_request_target: types: [labeled] paths-ignore: - "*.md" + - ".kokoro/**" + - ".github/**" jobs: lint: @@ -52,7 +56,11 @@ jobs: } - name: Checkout Repository - uses: actions/checkout@v4 + uses: 
actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + repository: ${{ github.event.pull_request.head.repo.full_name }} + token: ${{ secrets.GITHUB_TOKEN }} - name: Setup Python uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 diff --git a/.github/workflows/lint_fallback.yml b/.github/workflows/lint_fallback.yml new file mode 100644 index 0000000..04841b9 --- /dev/null +++ b/.github/workflows/lint_fallback.yml @@ -0,0 +1,30 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Lint +on: + pull_request: + paths: # These paths are the inverse of lint.yml + - "*.md" + - ".kokoro/**" + - ".github/**" + +jobs: + lint: + runs-on: ubuntu-latest + permissions: + contents: none + + steps: + - run: echo "No tests required." From 28d92cbbaa0f07b4001276977b4756314e7c3918 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 13:20:57 -0800 Subject: [PATCH 09/23] chore(deps): bump langchain from 0.1.8 to 0.1.11 (#45) * chore(deps): bump langchain from 0.1.8 to 0.1.11 Bumps [langchain](https://github.com/langchain-ai/langchain) from 0.1.8 to 0.1.11. - [Release notes](https://github.com/langchain-ai/langchain/releases) - [Commits](https://github.com/langchain-ai/langchain/compare/v0.1.8...v0.1.11) --- updated-dependencies: - dependency-name: langchain dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] * Update requirements.txt --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Averi Kitsch --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 27b775b..fa1e309 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -langchain==0.1.8 -langchain-community==0.0.21 +langchain==0.1.11 +langchain-community==0.0.25 SQLAlchemy==2.0.27 cloud-sql-python-connector[pymysql]==1.7.0 From 54fbab5fd41e7b49a2d5da800afad5d3fb66b40c Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Thu, 7 Mar 2024 12:46:47 -0800 Subject: [PATCH 10/23] docs: add github links (#46) --- docs/chat_message_history.ipynb | 33 ++++++---------------------- docs/document_loader.ipynb | 39 +++++++++++++++------------------ 2 files changed, 25 insertions(+), 47 deletions(-) diff --git a/docs/chat_message_history.ipynb b/docs/chat_message_history.ipynb index cfda0aa..5cb1a1f 100644 --- a/docs/chat_message_history.ipynb +++ b/docs/chat_message_history.ipynb @@ -13,6 +13,8 @@ "\n", "This notebook goes over how to use `Cloud SQL for MySQL` to store chat message history with the `MySQLChatMessageHistory` class.\n", "\n", + "Learn more about the package on [GitHub](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/).\n", + "\n", "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/googleapis/langchain-google-cloud-sql-mysql-python/blob/main/docs/chat_message_history.ipynb)" ] }, @@ -26,7 +28,9 @@ "## Before You Begin\n", "\n", "To run this notebook, you will need to do the following:\n", + "\n", " * [Create a Google Cloud Project](https://developers.google.com/workspace/guides/create-project)\n", + " * [Enable the Cloud SQL Admin API.](https://console.cloud.google.com/marketplace/product/google/sqladmin.googleapis.com)\n", " * [Create a Cloud SQL for MySQL instance](https://cloud.google.com/sql/docs/mysql/create-instance)\n", " * [Create a Cloud SQL database](https://cloud.google.com/sql/docs/mysql/create-manage-databases)\n", " * [Add an IAM database user to the database](https://cloud.google.com/sql/docs/mysql/add-manage-iam-users#creating-a-database-user) (Optional)" @@ -144,30 +148,6 @@ "!gcloud config set project {PROJECT_ID}" ] }, - { - "cell_type": "markdown", - "id": "rEWWNoNnKOgq", - "metadata": { - "id": "rEWWNoNnKOgq" - }, - "source": [ - "### 💡 API Enablement\n", - "The `langchain-google-cloud-sql-mysql` package requires that you [enable the Cloud SQL Admin API](https://console.cloud.google.com/flows/enableapi?apiid=sqladmin.googleapis.com) in your Google Cloud Project." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "5utKIdq7KYi5", - "metadata": { - "id": "5utKIdq7KYi5" - }, - "outputs": [], - "source": [ - "# enable Cloud SQL Admin API\n", - "!gcloud services enable sqladmin.googleapis.com" - ] - }, { "cell_type": "markdown", "id": "f8f2830ee9ca1e01", @@ -218,7 +198,7 @@ "\n", "To create a `MySQLEngine` using `MySQLEngine.from_instance()` you need to provide only 4 things:\n", "\n", - "1. `project_id` : Project ID of the Google Cloud Project where the Cloud SQL instance is located.\n", + "1. 
`project_id` : Project ID of the Google Cloud Project where the Cloud SQL instance is located.\n", "1. `region` : Region where the Cloud SQL instance is located.\n", "1. `instance` : The name of the Cloud SQL instance.\n", "1. `database` : The name of the database to connect to on the Cloud SQL instance.\n", @@ -230,6 +210,7 @@ "* [Manage users with IAM database authentication](https://cloud.google.com/sql/docs/mysql/add-manage-iam-users)\n", "\n", "Optionally, [built-in database authentication](https://cloud.google.com/sql/docs/mysql/built-in-authentication) using a username and password to access the Cloud SQL database can also be used. Just provide the optional `user` and `password` arguments to `MySQLEngine.from_instance()`:\n", + "\n", "* `user` : Database user to use for built-in database authentication and login\n", "* `password` : Database password to use for built-in database authentication and login.\n" ] @@ -545,7 +526,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.11.5" } }, "nbformat": 4, diff --git a/docs/document_loader.ipynb b/docs/document_loader.ipynb index 9abab20..a09a517 100644 --- a/docs/document_loader.ipynb +++ b/docs/document_loader.ipynb @@ -6,10 +6,12 @@ "source": [ "# Google Cloud SQL for MySQL\n", "\n", - "> [Cloud SQL](https://cloud.google.com/sql) is a fully managed relational database service that offers high performance, seamless integration, and impressive scalability. It offers [MySQL](https://cloud.google.com/sql/mysql), [PostgreSQL](https://cloud.google.com/sql/postgres), and [SQL Server](https://cloud.google.com/sql/sqlserver) database engines. Extend your database application to build AI-powered experiences leveraging Cloud SQL's Langchain integrations.\n", + "> [Cloud SQL](https://cloud.google.com/sql) is a fully managed relational database service that offers high performance, seamless integration, and impressive scalability. It offers [MySQL](https://cloud.google.com/sql/mysql), [PostgreSQL](https://cloud.google.com/sql/postgresql), and [SQL Server](https://cloud.google.com/sql/sqlserver) database engines. 
Extend your database application to build AI-powered experiences leveraging Cloud SQL's Langchain integrations.\n", "\n", "This notebook goes over how to use [Cloud SQL for MySQL](https://cloud.google.com/sql/mysql) to [save, load and delete langchain documents](https://python.langchain.com/docs/modules/data_connection/document_loaders/) with `MySQLLoader` and `MySQLDocumentSaver`.\n", "\n", + "Learn more about the package on [GitHub](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/).\n", + "\n", "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/googleapis/langchain-google-cloud-sql-mysql-python/blob/main/docs/document_loader.ipynb)" ] }, @@ -20,7 +22,9 @@ "## Before You Begin\n", "\n", "To run this notebook, you will need to do the following:\n", + "\n", "* [Create a Google Cloud Project](https://developers.google.com/workspace/guides/create-project)\n", + "* [Enable the Cloud SQL Admin API.](https://console.cloud.google.com/marketplace/product/google/sqladmin.googleapis.com)\n", "* [Create a Cloud SQL for MySQL instance](https://cloud.google.com/sql/docs/mysql/create-instance)\n", "* [Create a Cloud SQL database](https://cloud.google.com/sql/docs/mysql/create-manage-databases)\n", "* [Add an IAM database user to the database](https://cloud.google.com/sql/docs/mysql/add-manage-iam-users#creating-a-database-user) (Optional)\n", @@ -136,24 +140,6 @@ "auth.authenticate_user()" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### API Enablement\n", - "The `langchain-google-cloud-sql-mysql` package requires that you [enable the Cloud SQL Admin API](https://console.cloud.google.com/flows/enableapi?apiid=sqladmin.googleapis.com) in your Google Cloud Project." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# enable Cloud SQL Admin API\n", - "!gcloud services enable sqladmin.googleapis.com" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -179,10 +165,12 @@ "By default, [IAM database authentication](https://cloud.google.com/sql/docs/mysql/iam-authentication#iam-db-auth) will be used as the method of database authentication. This library uses the IAM principal belonging to the [Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/application-default-credentials) sourced from the envionment.\n", "\n", "For more informatin on IAM database authentication please see:\n", + "\n", "* [Configure an instance for IAM database authentication](https://cloud.google.com/sql/docs/mysql/create-edit-iam-instances)\n", "* [Manage users with IAM database authentication](https://cloud.google.com/sql/docs/mysql/add-manage-iam-users)\n", "\n", "Optionally, [built-in database authentication](https://cloud.google.com/sql/docs/mysql/built-in-authentication) using a username and password to access the Cloud SQL database can also be used. 
Just provide the optional `user` and `password` arguments to `MySQLEngine.from_instance()`:\n", + "\n", "* `user` : Database user to use for built-in database authentication and login\n", "* `password` : Database password to use for built-in database authentication and login." ] @@ -207,6 +195,7 @@ "### Initialize a table\n", "\n", "Initialize a table of default schema via `MySQLEngine.init_document_table()`. Table Columns:\n", + "\n", "- page_content (type: text)\n", "- langchain_metadata (type: JSON)\n", "\n", @@ -229,6 +218,7 @@ "### Save documents\n", "\n", "Save langchain documents with `MySQLDocumentSaver.add_documents()`. To initialize `MySQLDocumentSaver` class you need to provide 2 things:\n", + "\n", "1. `engine` - An instance of a `MySQLEngine` engine.\n", "2. `table_name` - The name of the table within the Cloud SQL database to store langchain documents." ] @@ -241,8 +231,8 @@ }, "outputs": [], "source": [ - "from langchain_google_cloud_sql_mysql import MySQLDocumentSaver\n", "from langchain_core.documents import Document\n", + "from langchain_google_cloud_sql_mysql import MySQLDocumentSaver\n", "\n", "test_docs = [\n", " Document(\n", @@ -274,6 +264,7 @@ "metadata": {}, "source": [ "Load langchain documents with `MySQLLoader.load()` or `MySQLLoader.lazy_load()`. `lazy_load` returns a generator that only queries database during the iteration. To initialize `MySQLLoader` class you need to provide:\n", + "\n", "1. `engine` - An instance of a `MySQLEngine` engine.\n", "2. `table_name` - The name of the table within the Cloud SQL database to store langchain documents." ] @@ -345,6 +336,7 @@ "For table with default schema (page_content, langchain_metadata), the deletion criteria is:\n", "\n", "A `row` should be deleted if there exists a `document` in the list, such that\n", + "\n", "- `document.page_content` equals `row[page_content]`\n", "- `document.metadata` equals `row[langchain_metadata]`" ] @@ -402,7 +394,7 @@ " CREATE TABLE IF NOT EXISTS `{TABLE_NAME}`(\n", " fruit_id INT AUTO_INCREMENT PRIMARY KEY,\n", " fruit_name VARCHAR(100) NOT NULL,\n", - " variety VARCHAR(50), \n", + " variety VARCHAR(50),\n", " quantity_in_stock INT NOT NULL,\n", " price_per_unit DECIMAL(6,2) NOT NULL,\n", " organic TINYINT(1) NOT NULL\n", @@ -449,6 +441,7 @@ "metadata": {}, "source": [ "We can specify the content and metadata we want to load by setting the `content_columns` and `metadata_columns` when initializing the `MySQLLoader`.\n", + "\n", "1. `content_columns`: The columns to write into the `page_content` of the document.\n", "2. `metadata_columns`: The columns to write into the `metadata` of the document.\n", "\n", @@ -487,12 +480,14 @@ "metadata": {}, "source": [ "In order to save langchain document into table with customized metadata fields. We need first create such a table via `MySQLEngine.init_document_table()`, and specify the list of `metadata_columns` we want it to have. In this example, the created table will have table columns:\n", + "\n", "- description (type: text): for storing fruit description.\n", "- fruit_name (type text): for storing fruit name.\n", "- organic (type tinyint(1)): to tell if the fruit is organic.\n", "- other_metadata (type: JSON): for storing other metadata information of the fruit.\n", "\n", "We can use the following parameters with `MySQLEngine.init_document_table()` to create the table:\n", + "\n", "1. `table_name`: The name of the table within the Cloud SQL database to store langchain documents.\n", "2. 
`metadata_columns`: A list of `sqlalchemy.Column` indicating the list of metadata columns we need.\n", "3. `content_column`: The name of column to store `page_content` of langchain document. Default: `page_content`.\n", @@ -532,6 +527,7 @@ "metadata": {}, "source": [ "Save documents with `MySQLDocumentSaver.add_documents()`. As you can see in this example, \n", + "\n", "- `document.page_content` will be saved into `description` column.\n", "- `document.metadata.fruit_name` will be saved into `fruit_name` column.\n", "- `document.metadata.organic` will be saved into `organic` column.\n", @@ -585,6 +581,7 @@ "We can also delete documents from table with customized metadata columns via `MySQLDocumentSaver.delete()`. The deletion criteria is:\n", "\n", "A `row` should be deleted if there exists a `document` in the list, such that\n", + "\n", "- `document.page_content` equals `row[page_content]`\n", "- For every metadata field `k` in `document.metadata`\n", " - `document.metadata[k]` equals `row[k]` or `document.metadata[k]` equals `row[langchain_metadata][k]`\n", From 38f2754a214d05d0bb34eada3aaf8fb4946db5a3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 15 Mar 2024 23:47:05 +0100 Subject: [PATCH 11/23] chore(deps): update python-nonmajor (#48) --- .kokoro/requirements.txt | 411 ++++++++++++++++++++------------------- pyproject.toml | 8 +- requirements.txt | 8 +- 3 files changed, 219 insertions(+), 208 deletions(-) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 251a6e4..92a6ed0 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,91 +4,79 @@ # # pip-compile --generate-hashes requirements.in # -argcomplete==3.2.2 \ - --hash=sha256:e44f4e7985883ab3e73a103ef0acd27299dbfe2dfed00142c35d4ddd3005901d \ - --hash=sha256:f3e49e8ea59b4026ee29548e24488af46e30c9de57d48638e24f54a1ea1000a2 +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==22.2.0 \ + --hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \ + --hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99 # via gcp-releasetool -build==1.0.3 \ - --hash=sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b \ - --hash=sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f +build==1.1.1 \ + --hash=sha256:8ed0851ee76e6e38adce47e4bee3b51c771d86c64cf578d0c2245567ee200e73 \ + --hash=sha256:8eea65bb45b1aac2e734ba2cc8dad3a6d97d97901a395bd0ed3e7b46953d2a31 # via -r requirements.in cachetools==4.2.4 \ --hash=sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693 \ --hash=sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2023.11.17 \ + --hash=sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1 \ + --hash=sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - 
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - 
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + 
--hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -96,9 +84,9 @@ charset-normalizer==2.1.1 \ # via # -r requirements.in # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb +click==8.1.7 \ + --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ + --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via # -r requirements.in # gcp-docuploader @@ -109,39 +97,39 @@ colorlog==6.8.2 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - 
--hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + 
--hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via # gcp-releasetool # secretstorage @@ -171,9 +159,9 @@ google-api-core==2.17.1 \ # via # google-cloud-core # google-cloud-storage -google-auth==2.15.0 \ - --hash=sha256:6897b93556d8d807ad70701bb89f000183aea366ca7ed94680828b37437a4994 \ - --hash=sha256:72f12a6cfc968d754d7bdab369c5c5c16032106e52d32c6dfd8484e4c01a6d1f +google-auth==2.28.2 \ + --hash=sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30 \ + --hash=sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38 # via # gcp-releasetool # google-api-core @@ -183,9 +171,9 @@ google-cloud-core==2.4.1 \ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.11.0 \ - --hash=sha256:6fbf62659b83c8f3a0a743af0d661d2046c97c3a5bfb587c4662c4bc68de3e31 \ - --hash=sha256:88cbd7fb3d701c780c4272bc26952db99f25eb283fb4c2208423249f00b5fe53 +google-cloud-storage==2.15.0 \ + --hash=sha256:5d9237f88b648e1d724a0f20b5cde65996a37fe51d75d17660b1404097327dd2 \ + --hash=sha256:7560a3c48a03d66c553dc55215d35883c680fe0ab44c23aa4832800ccc855c74 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -261,13 +249,13 @@ google-resumable-media==2.7.0 \ --hash=sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b \ --hash=sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08 # via google-cloud-storage -googleapis-common-protos==1.62.0 \ - --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ - --hash=sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277 +googleapis-common-protos==1.63.0 \ + --hash=sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e \ + --hash=sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632 # via google-api-core -idna==3.4 \ - 
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.6 \ + --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \ + --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f # via requests importlib-metadata==4.13.0 \ --hash=sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116 \ @@ -276,7 +264,7 @@ importlib-metadata==4.13.0 \ # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ +jaraco-classes==3.3.1 \ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring @@ -290,9 +278,9 @@ jinja2==3.1.3 \ --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==23.13.1 \ + --hash=sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd \ + --hash=sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678 # via # gcp-releasetool # twine @@ -300,55 +288,75 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - 
--hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +MarkupSafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + 
--hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +more-itertools==9.1.0 \ + --hash=sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d \ + --hash=sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3 # via jaraco-classes nh3==0.2.15 \ 
--hash=sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770 \ @@ -372,16 +380,16 @@ nox==2023.4.22 \ --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==23.0 \ - --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \ - --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # build # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine platformdirs==4.2.0 \ --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ @@ -415,15 +423,15 @@ protobuf==3.20.3 \ # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.1 \ + --hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \ + --hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ @@ -435,9 +443,9 @@ pygments==2.17.2 \ # via # readme-renderer # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +PyJWT==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 @@ -446,9 +454,9 @@ pyproject-hooks==1.0.0 \ --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 # via build -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool readme-renderer==43.0 \ --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ @@ -471,9 +479,9 @@ rfc3986==2.0.0 \ 
--hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.7.0 \ - --hash=sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa \ - --hash=sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235 +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -514,16 +522,19 @@ virtualenv==20.25.1 \ --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 # via nox -wheel==0.42.0 \ - --hash=sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d \ - --hash=sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.11.0 \ - --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \ - --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766 +zipp==3.18.1 \ + --hash=sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b \ + --hash=sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715 # via importlib-metadata # WARNING: The following packages were not pinned, but pip requires them to be # pinned when the requirements file includes hashes and the requirement is not # satisfied by a package already installed. Consider using the --allow-unsafe flag. 
# setuptools +jaraco.classes==3.3.1 \ + --hash=sha256:86b534de565381f6b3c1c830d13f931d7be1a75f0081c57dff615578676e2206 \ + --hash=sha256:cb28a5ebda8bc47d8c8015307d93163464f9f2b91ab4006e09ff0ce07e8bfb30 diff --git a/pyproject.toml b/pyproject.toml index d3e448c..56918e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,11 +26,11 @@ Changelog = "https://github.com/googleapis/langchain-google-cloud-sql-mysql-pyth [project.optional-dependencies] test = [ - "black[jupyter]==24.2.0", + "black[jupyter]==24.3.0", "isort==5.13.2", - "mypy==1.8.0", - "pytest-asyncio==0.23.5", - "pytest==8.0.1" + "mypy==1.9.0", + "pytest-asyncio==0.23.5.post1", + "pytest==8.1.1" ] [build-system] diff --git a/requirements.txt b/requirements.txt index fa1e309..ae62b02 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -langchain==0.1.11 -langchain-community==0.0.25 -SQLAlchemy==2.0.27 -cloud-sql-python-connector[pymysql]==1.7.0 +langchain==0.1.12 +langchain-community==0.0.28 +SQLAlchemy==2.0.28 +cloud-sql-python-connector[pymysql]==1.8.0 From bc45740d83810a61e424a959d82cdd0e089dca34 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 15 Mar 2024 23:48:24 +0100 Subject: [PATCH 12/23] chore(deps): update dependency gcp-releasetool to v2 (#49) Co-authored-by: Averi Kitsch --- .kokoro/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 92a6ed0..8db52d8 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -149,9 +149,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.17.0 \ - --hash=sha256:1a759f4b0906f4ea9dc7db3649aa11a632c72f6dc6a54f10cf57c1925d034a1c \ - --hash=sha256:f23db51d85484998af5549181be726f177bf90b481de238fe7a99ec970266b6b +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.17.1 \ --hash=sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e \ From 3439c9d6a277a95da835f1c59d4727855a187dee Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Mon, 18 Mar 2024 16:50:49 -0700 Subject: [PATCH 13/23] feat(ci): run tests against multiple versions (#51) * feat(ci): run tests against multiple versions * add uuid to test * fix and lint * fix table name * fix type * Update DEVELOPER.md * Update DEVELOPER.md --- .github/sync-repo-settings.yaml | 6 +- DEVELOPER.md | 81 +++++++++++++++++++ integration.cloudbuild.yaml | 30 ++++--- .../loader.py | 3 +- .../test_mysql_chat_message_history.py | 21 +++-- 5 files changed, 119 insertions(+), 22 deletions(-) create mode 100644 DEVELOPER.md diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 47d86a2..5d0f9a8 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -27,7 +27,11 @@ branchProtectionRules: requiredStatusCheckContexts: - "cla/google" - "lint" - - "mysql-integration-test-pr (langchain-cloud-sql-testing)" + - "mysql-integration-test-pr-py38 (langchain-cloud-sql-testing)" + - "mysql-integration-test-pr-py39 (langchain-cloud-sql-testing)" + - "mysql-integration-test-pr-py310 (langchain-cloud-sql-testing)" + - "mysql-integration-test-pr-py311 (langchain-cloud-sql-testing)" + - 
"mysql-integration-test-pr-py312 (langchain-cloud-sql-testing)" - "conventionalcommits.org" - "header-check" # - Add required status checks like presubmit tests diff --git a/DEVELOPER.md b/DEVELOPER.md new file mode 100644 index 0000000..a338170 --- /dev/null +++ b/DEVELOPER.md @@ -0,0 +1,81 @@ +# DEVELOPER.md + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + +## Processes + +### Conventional Commit messages + +This repository uses tool [Release Please](https://github.com/googleapis/release-please) to create GitHub and PyPi releases. It does so by parsing your +git history, looking for [Conventional Commit messages](https://www.conventionalcommits.org/), +and creating release PRs. + +Learn more by reading [How should I write my commits?](https://github.com/googleapis/release-please?tab=readme-ov-file#how-should-i-write-my-commits) + +## Testing + +### Run tests locally + +1. Set environment variables for `INSTANCE_ID`, `DB_NAME`, `TABLE_NAME`, `REGION`, `DB_USER`, `DB_PASSWORD` + +1. Run pytest to automatically run all tests: + + ```bash + pytest + ``` + +### CI Platform Setup + +Cloud Build is used to run tests against Google Cloud resources in test project: langchain-cloud-sql-testing. +Each test has a corresponding Cloud Build trigger, see [all triggers][triggers]. +These tests are registered as required tests in `.github/sync-repo-settings.yaml`. + +#### Trigger Setup + +Cloud Build triggers (for Python versions 3.8 to 3.11) were created with the following specs: + +```YAML +name: mysql-integration-test-pr-py38 +description: Run integration tests on PR for Python 3.8 +filename: integration.cloudbuild.yaml +github: + name: langchain-google-cloud-sql-mysql-python + owner: googleapis + pullRequest: + branch: .* + commentControl: COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY +ignoredFiles: + - docs/** + - .kokoro/** + - .github/** + - "*.md" +substitutions: + _INSTANCE_ID: + _DB_NAME: + _REGION: us-central1 + _VERSION: "3.8" +``` + +Use `gcloud builds triggers import --source=trigger.yaml` create triggers via the command line + +#### Project Setup + +1. Create an Cloud SQL for PostgreSQL instance and database +1. Setup Cloud Build triggers (above) + +#### Run tests with Cloud Build + +* Run integration test: + + ```bash + gcloud builds submit --config integration.cloudbuild.yaml --region us-central1 --substitutions=_INSTANCE_ID=$INSTANCE_ID,_DB_NAME=$DB_NAME,_REGION=$REGION + ``` + +#### Trigger + +To run Cloud Build tests on GitHub from external contributors, ie RenovateBot, comment: `/gcbrun`. 
+ + +[triggers]: https://console.cloud.google.com/cloud-build/triggers?e=13802955&project=langchain-cloud-sql-testing diff --git a/integration.cloudbuild.yaml b/integration.cloudbuild.yaml index 0575e89..61e569b 100644 --- a/integration.cloudbuild.yaml +++ b/integration.cloudbuild.yaml @@ -14,35 +14,39 @@ steps: - id: Install dependencies - name: python:3.11 + name: python:${_VERSION} entrypoint: pip args: ["install", "--user", "-r", "requirements.txt"] - id: Install module (and test requirements) - name: python:3.11 + name: python:${_VERSION} entrypoint: pip args: ["install", ".[test]", "--user"] - id: Run integration tests - name: python:3.11 + name: python:${_VERSION} entrypoint: python args: ["-m", "pytest"] env: - - 'PROJECT_ID=$PROJECT_ID' - - 'INSTANCE_ID=$_INSTANCE_ID' - - 'DB_NAME=$_DB_NAME' - - 'TABLE_NAME=test-$BUILD_ID' - - 'REGION=$_REGION' - secretEnv: ['DB_USER', 'DB_PASSWORD'] + - "PROJECT_ID=$PROJECT_ID" + - "INSTANCE_ID=$_INSTANCE_ID" + - "DB_NAME=$_DB_NAME" + - "TABLE_NAME=test-$BUILD_ID" + - "REGION=$_REGION" + secretEnv: ["DB_USER", "DB_PASSWORD"] availableSecrets: secretManager: - - versionName: projects/$PROJECT_ID/secrets/langchain-test-mysql-username/versions/1 - env: 'DB_USER' - - versionName: projects/$PROJECT_ID/secrets/langchain-test-mysql-password/versions/1 - env: 'DB_PASSWORD' + - versionName: projects/$PROJECT_ID/secrets/langchain-test-mysql-username/versions/1 + env: "DB_USER" + - versionName: projects/$PROJECT_ID/secrets/langchain-test-mysql-password/versions/1 + env: "DB_PASSWORD" substitutions: _INSTANCE_ID: test-instance _REGION: us-central1 _DB_NAME: test + _VERSION: "3.8" + +options: + dynamicSubstitutions: true diff --git a/src/langchain_google_cloud_sql_mysql/loader.py b/src/langchain_google_cloud_sql_mysql/loader.py index 02345e7..26bfaaa 100644 --- a/src/langchain_google_cloud_sql_mysql/loader.py +++ b/src/langchain_google_cloud_sql_mysql/loader.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import json -from collections.abc import Iterable -from typing import Any, Dict, Iterator, List, Optional, Sequence, cast +from typing import Any, Dict, Iterable, Iterator, List, Optional, cast import pymysql import sqlalchemy diff --git a/tests/integration/test_mysql_chat_message_history.py b/tests/integration/test_mysql_chat_message_history.py index c437392..3929d35 100644 --- a/tests/integration/test_mysql_chat_message_history.py +++ b/tests/integration/test_mysql_chat_message_history.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import os +import uuid from typing import Generator import pytest @@ -25,17 +26,21 @@ region = os.environ["REGION"] instance_id = os.environ["INSTANCE_ID"] db_name = os.environ["DB_NAME"] -table_name = "message_store" +table_name = "message_store" + str(uuid.uuid4()) +malformed_table = "malformed_table" + str(uuid.uuid4()) @pytest.fixture(name="memory_engine") def setup() -> Generator: engine = MySQLEngine.from_instance( - project_id=project_id, region=region, instance=instance_id, database=db_name + project_id=project_id, + region=region, + instance=instance_id, + database=db_name, ) # create table with malformed schema (missing 'type') - query = """CREATE TABLE malformed_table ( + query = f"""CREATE TABLE `{malformed_table}` ( id INT AUTO_INCREMENT PRIMARY KEY, session_id TEXT NOT NULL, data JSON NOT NULL @@ -47,7 +52,7 @@ def setup() -> Generator: # use default table for MySQLChatMessageHistory with engine.connect() as conn: conn.execute(sqlalchemy.text(f"DROP TABLE IF EXISTS `{table_name}`")) - conn.execute(sqlalchemy.text(f"DROP TABLE IF EXISTS malformed_table")) + conn.execute(sqlalchemy.text(f"DROP TABLE IF EXISTS `{malformed_table}`")) conn.commit() @@ -71,7 +76,9 @@ def test_chat_message_history(memory_engine: MySQLEngine) -> None: assert len(history.messages) == 0 -def test_chat_message_history_table_does_not_exist(memory_engine: MySQLEngine) -> None: +def test_chat_message_history_table_does_not_exist( + memory_engine: MySQLEngine, +) -> None: """Test that MySQLChatMessageHistory fails if table does not exist.""" with pytest.raises(AttributeError) as exc_info: MySQLChatMessageHistory( @@ -90,5 +97,7 @@ def test_chat_message_history_table_malformed_schema( """Test that MySQLChatMessageHistory fails if schema is malformed.""" with pytest.raises(IndexError): MySQLChatMessageHistory( - engine=memory_engine, session_id="test", table_name="malformed_table" + engine=memory_engine, + session_id="test", + table_name=malformed_table, ) From a1c941149e1f1b33991b997e5236c4a7971058fd Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Tue, 26 Mar 2024 17:22:36 -0400 Subject: [PATCH 14/23] feat: add MySQLVectorStore initialization methods (#52) Add constructor and classmethods for MySQLVectorStore. --- integration.cloudbuild.yaml | 2 +- .../__init__.py | 5 +- .../chat_message_history.py | 2 +- .../engine.py | 77 ++++++ .../indexes.py | 22 ++ .../vectorstore.py | 257 ++++++++++++++++++ tests/integration/test_mysql_vectorstore.py | 182 +++++++++++++ .../test_mysql_vectorstore_from_methods.py | 169 ++++++++++++ 8 files changed, 713 insertions(+), 3 deletions(-) create mode 100644 src/langchain_google_cloud_sql_mysql/indexes.py create mode 100644 src/langchain_google_cloud_sql_mysql/vectorstore.py create mode 100644 tests/integration/test_mysql_vectorstore.py create mode 100644 tests/integration/test_mysql_vectorstore_from_methods.py diff --git a/integration.cloudbuild.yaml b/integration.cloudbuild.yaml index 61e569b..fae720a 100644 --- a/integration.cloudbuild.yaml +++ b/integration.cloudbuild.yaml @@ -43,7 +43,7 @@ availableSecrets: env: "DB_PASSWORD" substitutions: - _INSTANCE_ID: test-instance + _INSTANCE_ID: mysql-vector _REGION: us-central1 _DB_NAME: test _VERSION: "3.8" diff --git a/src/langchain_google_cloud_sql_mysql/__init__.py b/src/langchain_google_cloud_sql_mysql/__init__.py index 72d5e3c..29d2540 100644 --- a/src/langchain_google_cloud_sql_mysql/__init__.py +++ b/src/langchain_google_cloud_sql_mysql/__init__.py @@ -13,14 +13,17 @@ # limitations under the License. 
from .chat_message_history import MySQLChatMessageHistory -from .engine import MySQLEngine +from .engine import Column, MySQLEngine from .loader import MySQLDocumentSaver, MySQLLoader +from .vectorstore import MySQLVectorStore from .version import __version__ __all__ = [ + "Column", "MySQLChatMessageHistory", "MySQLDocumentSaver", "MySQLEngine", "MySQLLoader", + "MySQLVectorStore", "__version__", ] diff --git a/src/langchain_google_cloud_sql_mysql/chat_message_history.py b/src/langchain_google_cloud_sql_mysql/chat_message_history.py index c51e607..56584b6 100644 --- a/src/langchain_google_cloud_sql_mysql/chat_message_history.py +++ b/src/langchain_google_cloud_sql_mysql/chat_message_history.py @@ -25,7 +25,7 @@ class MySQLChatMessageHistory(BaseChatMessageHistory): """Chat message history stored in a Cloud SQL MySQL database. Args: - engine (MySQLEngine): SQLAlchemy connection pool engine for managing + engine (MySQLEngine): Connection pool engine for managing connections to Cloud SQL for MySQL. session_id (str): Arbitrary key that is used to store the messages of a single chat session. diff --git a/src/langchain_google_cloud_sql_mysql/engine.py b/src/langchain_google_cloud_sql_mysql/engine.py index 27a590c..63a53a6 100644 --- a/src/langchain_google_cloud_sql_mysql/engine.py +++ b/src/langchain_google_cloud_sql_mysql/engine.py @@ -31,6 +31,21 @@ USER_AGENT = "langchain-google-cloud-sql-mysql-python/" + __version__ +from dataclasses import dataclass + + +@dataclass +class Column: + name: str + data_type: str + nullable: bool = True + + def __post_init__(self): + if not isinstance(self.name, str): + raise ValueError("Column name must be type string") + if not isinstance(self.data_type, str): + raise ValueError("Column data_type must be type string") + def _get_iam_principal_email( credentials: google.auth.credentials.Credentials, @@ -206,6 +221,20 @@ def connect(self) -> sqlalchemy.engine.Connection: """ return self.engine.connect() + def _execute(self, query: str, params: Optional[dict] = None) -> None: + """Execute a SQL query.""" + with self.engine.connect() as conn: + conn.execute(sqlalchemy.text(query), params) + conn.commit() + + def _fetch(self, query: str, params: Optional[dict] = None): + """Fetch results from a SQL query.""" + with self.engine.connect() as conn: + result = conn.execute(sqlalchemy.text(query), params) + result_map = result.mappings() + result_fetch = result_map.fetchall() + return result_fetch + def init_chat_history_table(self, table_name: str) -> None: """Create table with schema required for MySQLChatMessageHistory class. @@ -293,3 +322,51 @@ def _load_document_table(self, table_name: str) -> sqlalchemy.Table: metadata = sqlalchemy.MetaData() sqlalchemy.MetaData.reflect(metadata, bind=self.engine, only=[table_name]) return metadata.tables[table_name] + + def init_vectorstore_table( + self, + table_name: str, + vector_size: int, + content_column: str = "content", + embedding_column: str = "embedding", + metadata_columns: List[Column] = [], + metadata_json_column: str = "langchain_metadata", + id_column: str = "langchain_id", + overwrite_existing: bool = False, + store_metadata: bool = True, + ) -> None: + """ + Create a table for saving of vectors to be used with MySQLVectorStore. + + Args: + table_name (str): The MySQL database table name. + vector_size (int): Vector size for the embedding model to be used. + content_column (str): Name of the column to store document content. + Deafult: `page_content`. 
+ embedding_column (str) : Name of the column to store vector embeddings. + Default: `embedding`. + metadata_columns (List[Column]): A list of Columns to create for custom + metadata. Default: []. Optional. + metadata_json_column (str): The column to store extra metadata in JSON format. + Default: `langchain_metadata`. Optional. + id_column (str): Name of the column to store ids. + Default: `langchain_id`. Optional, + overwrite_existing (bool): Whether to drop existing table. Default: False. + store_metadata (bool): Whether to store metadata in the table. + Default: True. + """ + query = f"""CREATE TABLE `{table_name}`( + `{id_column}` CHAR(36) PRIMARY KEY, + `{content_column}` TEXT NOT NULL, + `{embedding_column}` vector({vector_size}) USING VARBINARY NOT NULL""" + for column in metadata_columns: + nullable = "NOT NULL" if not column.nullable else "" + query += f",\n`{column.name}` {column.data_type} {nullable}" + if store_metadata: + query += f""",\n`{metadata_json_column}` JSON""" + query += "\n);" + + with self.engine.connect() as conn: + if overwrite_existing: + conn.execute(sqlalchemy.text(f"DROP TABLE IF EXISTS `{table_name}`")) + conn.execute(sqlalchemy.text(query)) diff --git a/src/langchain_google_cloud_sql_mysql/indexes.py b/src/langchain_google_cloud_sql_mysql/indexes.py new file mode 100644 index 0000000..d038abb --- /dev/null +++ b/src/langchain_google_cloud_sql_mysql/indexes.py @@ -0,0 +1,22 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC +from dataclasses import dataclass + + +@dataclass +class QueryOptions(ABC): + def to_string(self) -> str: + raise NotImplementedError("to_string method must be implemented by subclass") diff --git a/src/langchain_google_cloud_sql_mysql/vectorstore.py b/src/langchain_google_cloud_sql_mysql/vectorstore.py new file mode 100644 index 0000000..602848e --- /dev/null +++ b/src/langchain_google_cloud_sql_mysql/vectorstore.py @@ -0,0 +1,257 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
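# Illustrative sketch: how the init_vectorstore_table() method and the Column
# dataclass introduced in this change might be used together. This assumes a
# Cloud SQL for MySQL instance with vector support enabled; the project, region,
# instance, database, and table names below are placeholders, and vector_size
# must match the embedding model that will be used with the table.
from langchain_google_cloud_sql_mysql import Column, MySQLEngine

engine = MySQLEngine.from_instance(
    project_id="my-project",      # placeholder
    region="us-central1",         # placeholder
    instance="my-instance",       # placeholder
    database="my_database",       # placeholder
)

# Create a table sized for a 768-dimensional embedding model, with two typed
# metadata columns in addition to the default JSON metadata column.
engine.init_vectorstore_table(
    table_name="my_vector_table",
    vector_size=768,
    metadata_columns=[Column("page", "TEXT"), Column("source", "TEXT")],
    overwrite_existing=True,
)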
+ +# TODO: Remove below import when minimum supported Python version is 3.10 +from __future__ import annotations + +import json +from typing import Any, Iterable, List, Optional, Type + +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore + +from .engine import MySQLEngine +from .indexes import QueryOptions + + +class MySQLVectorStore(VectorStore): + def __init__( + self, + engine: MySQLEngine, + embedding_service: Embeddings, + table_name: str, + content_column: str = "content", + embedding_column: str = "embedding", + metadata_columns: List[str] = [], + ignore_metadata_columns: Optional[List[str]] = None, + id_column: str = "langchain_id", + metadata_json_column: Optional[str] = "langchain_metadata", + query_options: Optional[QueryOptions] = None, + ): + """Constructor for MySQLVectorStore. + Args: + engine (MySQLEngine): Connection pool engine for managing + connections to Cloud SQL for MySQL database. + embedding_service (Embeddings): Text embedding model to use. + table_name (str): Name of an existing table or table to be created. + content_column (str): Column that represent a Document's + page_content. Defaults to "content". + embedding_column (str): Column for embedding vectors. The embedding + is generated from the document value. Defaults to "embedding". + metadata_columns (List[str]): Column(s) that represent a document's metadata. + ignore_metadata_columns (List[str]): Column(s) to ignore in + pre-existing tables for a document's metadata. Can not be used + with metadata_columns. Defaults to None. + id_column (str): Column that represents the Document's id. + Defaults to "langchain_id". + metadata_json_column (str): Column to store metadata as JSON. + Defaults to "langchain_metadata". + """ + if metadata_columns and ignore_metadata_columns: + raise ValueError( + "Can not use both metadata_columns and ignore_metadata_columns." + ) + # Get field type information + stmt = f"SELECT column_name, data_type FROM information_schema.columns WHERE table_name = '{table_name}'" + + results = engine._fetch(stmt) + columns = {} + for field in results: + columns[field["COLUMN_NAME"]] = field["DATA_TYPE"] + + # Check columns + if id_column not in columns: + raise ValueError(f"Id column, {id_column}, does not exist.") + if content_column not in columns: + raise ValueError(f"Content column, {content_column}, does not exist.") + content_type = columns[content_column] + if content_type != "text" and "char" not in content_type: + raise ValueError( + f"Content column, {content_column}, is type, {content_type}. It must be a type of character string." + ) + if embedding_column not in columns: + raise ValueError(f"Embedding column, {embedding_column}, does not exist.") + if columns[embedding_column] != "varbinary": + raise ValueError( + f"Embedding column, {embedding_column}, is not type Vector (varbinary)." 
+ ) + + metadata_json_column = ( + None if metadata_json_column not in columns else metadata_json_column + ) + + # If using metadata_columns check to make sure column exists + for column in metadata_columns: + if column not in columns: + raise ValueError(f"Metadata column, {column}, does not exist.") + + # If using ignore_metadata_columns, filter out known columns and set known metadata columns + all_columns = columns + if ignore_metadata_columns: + for column in ignore_metadata_columns: + del all_columns[column] + + del all_columns[id_column] + del all_columns[content_column] + del all_columns[embedding_column] + metadata_columns = [key for key, _ in all_columns.keys()] + + # set all class attributes + self.engine = engine + self.embedding_service = embedding_service + self.table_name = table_name + self.content_column = content_column + self.embedding_column = embedding_column + self.metadata_columns = metadata_columns + self.id_column = id_column + self.metadata_json_column = metadata_json_column + self.query_options = query_options + + @property + def embeddings(self) -> Embeddings: + return self.embedding_service + + def _add_embeddings( + self, + texts: Iterable[str], + embeddings: List[List[float]], + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + **kwargs: Any, + ) -> List[str]: + if not ids: + ids = ["NULL" for _ in texts] + if not metadatas: + metadatas = [{} for _ in texts] + # Insert embeddings + for id, content, embedding, metadata in zip(ids, texts, embeddings, metadatas): + metadata_col_names = ( + ", " + ", ".join(self.metadata_columns) + if len(self.metadata_columns) > 0 + else "" + ) + insert_stmt = f"INSERT INTO `{self.table_name}`(`{self.id_column}`, `{self.content_column}`, `{self.embedding_column}`{metadata_col_names}" + values = {"id": id, "content": content, "embedding": str(embedding)} + values_stmt = "VALUES (:id, :content, string_to_vector(:embedding)" + + # Add metadata + extra = metadata + for metadata_column in self.metadata_columns: + if metadata_column in metadata: + values_stmt += f", :{metadata_column}" + values[metadata_column] = metadata[metadata_column] + del extra[metadata_column] + else: + values_stmt += ",null" + + # Add JSON column and/or close statement + insert_stmt += ( + f", {self.metadata_json_column})" if self.metadata_json_column else ")" + ) + if self.metadata_json_column: + values_stmt += ", :extra)" + values["extra"] = json.dumps(extra) + else: + values_stmt += ")" + + query = insert_stmt + values_stmt + self.engine._execute(query, values) + + return ids + + def add_texts( + self, + texts: Iterable[str], + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + **kwargs: Any, + ) -> List[str]: + embeddings = self.embedding_service.embed_documents(list(texts)) + ids = self._add_embeddings( + texts, embeddings, metadatas=metadatas, ids=ids, **kwargs + ) + return ids + + @classmethod + def from_texts( # type: ignore[override] + cls: Type[MySQLVectorStore], + texts: List[str], + embedding: Embeddings, + engine: MySQLEngine, + table_name: str, + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + content_column: str = "content", + embedding_column: str = "embedding", + metadata_columns: List[str] = [], + ignore_metadata_columns: Optional[List[str]] = None, + id_column: str = "langchain_id", + metadata_json_column: str = "langchain_metadata", + **kwargs: Any, + ): + vs = cls( + engine=engine, + embedding_service=embedding, + table_name=table_name, + 
content_column=content_column, + embedding_column=embedding_column, + metadata_columns=metadata_columns, + ignore_metadata_columns=ignore_metadata_columns, + id_column=id_column, + metadata_json_column=metadata_json_column, + ) + vs.add_texts(texts, metadatas=metadatas, ids=ids, **kwargs) + return vs + + @classmethod + def from_documents( # type: ignore[override] + cls: Type[MySQLVectorStore], + documents: List[Document], + embedding: Embeddings, + engine: MySQLEngine, + table_name: str, + ids: Optional[List[str]] = None, + content_column: str = "content", + embedding_column: str = "embedding", + metadata_columns: List[str] = [], + ignore_metadata_columns: Optional[List[str]] = None, + id_column: str = "langchain_id", + metadata_json_column: str = "langchain_metadata", + **kwargs: Any, + ) -> MySQLVectorStore: + vs = cls( + engine=engine, + embedding_service=embedding, + table_name=table_name, + content_column=content_column, + embedding_column=embedding_column, + metadata_columns=metadata_columns, + ignore_metadata_columns=ignore_metadata_columns, + id_column=id_column, + metadata_json_column=metadata_json_column, + ) + texts = [doc.page_content for doc in documents] + metadatas = [doc.metadata for doc in documents] + vs.add_texts(texts, metadatas=metadatas, ids=ids, **kwargs) + return vs + + def similarity_search( + self, + query: str, + k: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any, + ): + raise NotImplementedError diff --git a/tests/integration/test_mysql_vectorstore.py b/tests/integration/test_mysql_vectorstore.py new file mode 100644 index 0000000..a362cea --- /dev/null +++ b/tests/integration/test_mysql_vectorstore.py @@ -0,0 +1,182 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
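# Illustrative sketch: building a MySQLVectorStore with the from_documents
# classmethod added in this change. It assumes the table was created beforehand
# with engine.init_vectorstore_table() using a vector_size that matches the
# embedding model (768 here); connection values and the table name are
# placeholders. DeterministicFakeEmbedding is the deterministic stand-in
# embedding model also used by the integration tests that follow.
import uuid

from langchain_community.embeddings import DeterministicFakeEmbedding
from langchain_core.documents import Document

from langchain_google_cloud_sql_mysql import MySQLEngine, MySQLVectorStore

engine = MySQLEngine.from_instance(
    project_id="my-project",
    region="us-central1",
    instance="my-instance",
    database="my_database",
)

embedding = DeterministicFakeEmbedding(size=768)
docs = [
    Document(page_content="foo", metadata={"page": "0", "source": "google.com"}),
    Document(page_content="bar", metadata={"page": "1", "source": "google.com"}),
]
# Explicit ids are passed here, mirroring the tests, since the id column is the
# table's primary key.
ids = [str(uuid.uuid4()) for _ in docs]

store = MySQLVectorStore.from_documents(
    docs,
    embedding,
    engine,
    "my_vector_table",
    ids=ids,
)
# Note: at this point in the patch series similarity_search() still raises
# NotImplementedError; only write paths (add_texts and the from_* classmethods)
# are available.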
+ +import os +import uuid + +import pytest +from langchain_community.embeddings import DeterministicFakeEmbedding +from langchain_core.documents import Document + +from langchain_google_cloud_sql_mysql import Column, MySQLEngine, MySQLVectorStore + +DEFAULT_TABLE = "test_table" + str(uuid.uuid4()).replace("-", "_") +CUSTOM_TABLE = "test-table-custom" + str(uuid.uuid4()) +VECTOR_SIZE = 768 + +embeddings_service = DeterministicFakeEmbedding(size=VECTOR_SIZE) + +texts = ["foo", "bar", "baz"] +metadatas = [{"page": str(i), "source": "google.com"} for i in range(len(texts))] +docs = [ + Document(page_content=texts[i], metadata=metadatas[i]) for i in range(len(texts)) +] + +embeddings = [embeddings_service.embed_query(texts[i]) for i in range(len(texts))] + + +def get_env_var(key: str, desc: str) -> str: + v = os.environ.get(key) + if v is None: + raise ValueError(f"Must set env var {key} to: {desc}") + return v + + +class TestVectorStore: + @pytest.fixture(scope="module") + def db_project(self) -> str: + return get_env_var("PROJECT_ID", "project id for google cloud") + + @pytest.fixture(scope="module") + def db_region(self) -> str: + return get_env_var("REGION", "region for cloud sql instance") + + @pytest.fixture(scope="module") + def db_instance(self) -> str: + return get_env_var("INSTANCE_ID", "instance for cloud sql") + + @pytest.fixture(scope="module") + def db_name(self) -> str: + return get_env_var("DB_NAME", "database name on cloud sql instance") + + @pytest.fixture(scope="class") + def engine(self, db_project, db_region, db_instance, db_name): + engine = MySQLEngine.from_instance( + project_id=db_project, + instance=db_instance, + region=db_region, + database=db_name, + ) + + yield engine + + @pytest.fixture(scope="function") + def vs(self, engine): + engine.init_vectorstore_table( + DEFAULT_TABLE, + VECTOR_SIZE, + overwrite_existing=True, + ) + + vs = MySQLVectorStore( + engine, + embedding_service=embeddings_service, + table_name=DEFAULT_TABLE, + ) + yield vs + engine._execute(f"DROP TABLE IF EXISTS `{DEFAULT_TABLE}`") + + @pytest.fixture(scope="function") + def vs_custom(self, engine): + engine.init_vectorstore_table( + CUSTOM_TABLE, + VECTOR_SIZE, + id_column="myid", + content_column="mycontent", + embedding_column="myembedding", + metadata_columns=[Column("page", "TEXT"), Column("source", "TEXT")], + metadata_json_column="mymeta", + overwrite_existing=True, + ) + + vs = MySQLVectorStore( + engine, + embedding_service=embeddings_service, + table_name=CUSTOM_TABLE, + id_column="myid", + content_column="mycontent", + embedding_column="myembedding", + metadata_columns=["page", "source"], + metadata_json_column="mymeta", + ) + yield vs + engine._execute(f"DROP TABLE IF EXISTS `{CUSTOM_TABLE}`") + + def test_post_init(self, engine): + with pytest.raises(ValueError): + MySQLVectorStore( + engine, + embedding_service=embeddings_service, + table_name=CUSTOM_TABLE, + id_column="myid", + content_column="noname", + embedding_column="myembedding", + metadata_columns=["page", "source"], + metadata_json_column="mymeta", + ) + + def test_add_texts(self, engine, vs): + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs.add_texts(texts, ids=ids) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 3 + + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs.add_texts(texts, metadatas, ids) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 6 + engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + + def 
test_add_texts_edge_cases(self, engine, vs): + texts = ["Taylor's", '"Swift"', "best-friend"] + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs.add_texts(texts, ids=ids) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 3 + engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + + def test_add_embedding(self, engine, vs): + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs._add_embeddings(texts, embeddings, metadatas, ids) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 3 + engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + + def test_add_texts_custom(self, engine, vs_custom): + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs_custom.add_texts(texts, ids=ids) + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + content = [result["mycontent"] for result in results] + assert len(results) == 3 + assert "foo" in content + assert "bar" in content + assert "baz" in content + assert results[0]["myembedding"] + assert results[0]["page"] is None + assert results[0]["source"] is None + + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs_custom.add_texts(texts, metadatas, ids) + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + assert len(results) == 6 + engine._execute(f"TRUNCATE TABLE `{CUSTOM_TABLE}`") + + def test_add_embedding_custom(self, engine, vs_custom): + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs_custom._add_embeddings(texts, embeddings, metadatas, ids) + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + assert len(results) == 3 + engine._execute(f"TRUNCATE TABLE `{CUSTOM_TABLE}`") + + # Need tests for store metadata=False diff --git a/tests/integration/test_mysql_vectorstore_from_methods.py b/tests/integration/test_mysql_vectorstore_from_methods.py new file mode 100644 index 0000000..c165559 --- /dev/null +++ b/tests/integration/test_mysql_vectorstore_from_methods.py @@ -0,0 +1,169 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +import pytest +from langchain_community.embeddings import DeterministicFakeEmbedding +from langchain_core.documents import Document + +from langchain_google_cloud_sql_mysql import Column, MySQLEngine, MySQLVectorStore + +DEFAULT_TABLE = "test_table" + str(uuid.uuid4()).replace("-", "_") +CUSTOM_TABLE = "test_table_custom" + str(uuid.uuid4()).replace("-", "_") +VECTOR_SIZE = 768 + + +embeddings_service = DeterministicFakeEmbedding(size=VECTOR_SIZE) + +texts = ["foo", "bar", "baz"] +metadatas = [{"page": str(i), "source": "google.com"} for i in range(len(texts))] +docs = [ + Document(page_content=texts[i], metadata=metadatas[i]) for i in range(len(texts)) +] + +embeddings = [embeddings_service.embed_query(texts[i]) for i in range(len(texts))] + + +def get_env_var(key: str, desc: str) -> str: + v = os.environ.get(key) + if v is None: + raise ValueError(f"Must set env var {key} to: {desc}") + return v + + +class TestVectorStoreFromMethods: + @pytest.fixture(scope="module") + def db_project(self) -> str: + return get_env_var("PROJECT_ID", "project id for google cloud") + + @pytest.fixture(scope="module") + def db_region(self) -> str: + return get_env_var("REGION", "region for cloud sql instance") + + @pytest.fixture(scope="module") + def db_instance(self) -> str: + return get_env_var("INSTANCE_ID", "instance for cloud sql") + + @pytest.fixture(scope="module") + def db_name(self) -> str: + return get_env_var("DB_NAME", "database name on cloud sql instance") + + @pytest.fixture + def engine(self, db_project, db_region, db_instance, db_name): + engine = MySQLEngine.from_instance( + project_id=db_project, + instance=db_instance, + region=db_region, + database=db_name, + ) + engine.init_vectorstore_table(DEFAULT_TABLE, VECTOR_SIZE) + engine.init_vectorstore_table( + CUSTOM_TABLE, + VECTOR_SIZE, + id_column="myid", + content_column="mycontent", + embedding_column="myembedding", + metadata_columns=[Column("page", "TEXT"), Column("source", "TEXT")], + store_metadata=False, + ) + yield engine + engine._execute(f"DROP TABLE IF EXISTS `{DEFAULT_TABLE}`") + engine._execute(f"DROP TABLE IF EXISTS `{CUSTOM_TABLE}`") + + def test_from_texts(self, engine): + ids = [str(uuid.uuid4()) for i in range(len(texts))] + MySQLVectorStore.from_texts( + texts, + embeddings_service, + engine, + DEFAULT_TABLE, + metadatas=metadatas, + ids=ids, + ) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 3 + engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + + def test_from_docs(self, engine): + ids = [str(uuid.uuid4()) for i in range(len(texts))] + MySQLVectorStore.from_documents( + docs, + embeddings_service, + engine, + DEFAULT_TABLE, + ids=ids, + ) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 3 + engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + + def test_from_texts_custom(self, engine): + ids = [str(uuid.uuid4()) for i in range(len(texts))] + MySQLVectorStore.from_texts( + texts, + embeddings_service, + engine, + CUSTOM_TABLE, + ids=ids, + id_column="myid", + content_column="mycontent", + embedding_column="myembedding", + metadata_columns=["page", "source"], + ) + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + content = [result["mycontent"] for result in results] + assert len(results) == 3 + assert "foo" in content + assert "bar" in content + assert "baz" in content + assert results[0]["myembedding"] + assert results[0]["page"] is None + assert results[0]["source"] is None + + def 
test_from_docs_custom(self, engine): + ids = [str(uuid.uuid4()) for i in range(len(texts))] + docs = [ + Document( + page_content=texts[i], + metadata={"page": str(i), "source": "google.com"}, + ) + for i in range(len(texts)) + ] + MySQLVectorStore.from_documents( + docs, + embeddings_service, + engine, + CUSTOM_TABLE, + ids=ids, + id_column="myid", + content_column="mycontent", + embedding_column="myembedding", + metadata_columns=["page", "source"], + ) + + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + content = [result["mycontent"] for result in results] + assert len(results) == 3 + assert "foo" in content + assert "bar" in content + assert "baz" in content + assert results[0]["myembedding"] + pages = [result["page"] for result in results] + assert "0" in pages + assert "1" in pages + assert "2" in pages + assert results[0]["source"] == "google.com" + engine._execute(f"TRUNCATE TABLE `{CUSTOM_TABLE}`") From ce45617ae6c9f1b6e539c31e4bcdd47aa7daf964 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Wed, 27 Mar 2024 10:24:05 -0400 Subject: [PATCH 15/23] feat: support add and delete from MySQLVectorStore (#53) --- .../engine.py | 2 +- .../vectorstore.py | 26 +++++++++ tests/integration/test_mysql_vectorstore.py | 56 +++++++++++++++++++ 3 files changed, 83 insertions(+), 1 deletion(-) diff --git a/src/langchain_google_cloud_sql_mysql/engine.py b/src/langchain_google_cloud_sql_mysql/engine.py index 63a53a6..a410748 100644 --- a/src/langchain_google_cloud_sql_mysql/engine.py +++ b/src/langchain_google_cloud_sql_mysql/engine.py @@ -342,7 +342,7 @@ def init_vectorstore_table( table_name (str): The MySQL database table name. vector_size (int): Vector size for the embedding model to be used. content_column (str): Name of the column to store document content. - Deafult: `page_content`. + Default: `page_content`. embedding_column (str) : Name of the column to store vector embeddings. Default: `embedding`. 
metadata_columns (List[Column]): A list of Columns to create for custom diff --git a/src/langchain_google_cloud_sql_mysql/vectorstore.py b/src/langchain_google_cloud_sql_mysql/vectorstore.py index 602848e..31d9af5 100644 --- a/src/langchain_google_cloud_sql_mysql/vectorstore.py +++ b/src/langchain_google_cloud_sql_mysql/vectorstore.py @@ -184,6 +184,32 @@ def add_texts( ) return ids + def add_documents( + self, + documents: List[Document], + ids: Optional[List[str]] = None, + **kwargs: Any, + ) -> List[str]: + texts = [doc.page_content for doc in documents] + metadatas = [doc.metadata for doc in documents] + ids = self.add_texts(texts, metadatas=metadatas, ids=ids, **kwargs) + return ids + + def delete( + self, + ids: Optional[List[str]] = None, + **kwargs: Any, + ) -> bool: + if not ids: + return False + + id_list = ", ".join([f"'{id}'" for id in ids]) + query = ( + f"DELETE FROM `{self.table_name}` WHERE `{self.id_column}` in ({id_list})" + ) + self.engine._execute(query) + return True + @classmethod def from_texts( # type: ignore[override] cls: Type[MySQLVectorStore], diff --git a/tests/integration/test_mysql_vectorstore.py b/tests/integration/test_mysql_vectorstore.py index a362cea..75cfa22 100644 --- a/tests/integration/test_mysql_vectorstore.py +++ b/tests/integration/test_mysql_vectorstore.py @@ -146,6 +146,13 @@ def test_add_texts_edge_cases(self, engine, vs): assert len(results) == 3 engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + def test_add_docs(self, engine, vs): + ids = [str(uuid.uuid4()) for i in range(len(texts))] + vs.add_documents(docs, ids=ids) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 3 + engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + def test_add_embedding(self, engine, vs): ids = [str(uuid.uuid4()) for _ in range(len(texts))] vs._add_embeddings(texts, embeddings, metadatas, ids) @@ -153,6 +160,16 @@ def test_add_embedding(self, engine, vs): assert len(results) == 3 engine._execute(f"TRUNCATE TABLE `{DEFAULT_TABLE}`") + def test_delete(self, engine, vs): + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs.add_texts(texts, ids=ids) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 3 + # delete an ID + vs.delete([ids[0]]) + results = engine._fetch(f"SELECT * FROM `{DEFAULT_TABLE}`") + assert len(results) == 2 + def test_add_texts_custom(self, engine, vs_custom): ids = [str(uuid.uuid4()) for _ in range(len(texts))] vs_custom.add_texts(texts, ids=ids) @@ -172,6 +189,31 @@ def test_add_texts_custom(self, engine, vs_custom): assert len(results) == 6 engine._execute(f"TRUNCATE TABLE `{CUSTOM_TABLE}`") + def test_add_docs_custom(self, engine, vs_custom): + ids = [str(uuid.uuid4()) for i in range(len(texts))] + docs = [ + Document( + page_content=texts[i], + metadata={"page": str(i), "source": "google.com"}, + ) + for i in range(len(texts)) + ] + vs_custom.add_documents(docs, ids=ids) + + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + content = [result["mycontent"] for result in results] + assert len(results) == 3 + assert "foo" in content + assert "bar" in content + assert "baz" in content + assert results[0]["myembedding"] + pages = [result["page"] for result in results] + assert "0" in pages + assert "1" in pages + assert "2" in pages + assert results[0]["source"] == "google.com" + engine._execute(f"TRUNCATE TABLE `{CUSTOM_TABLE}`") + def test_add_embedding_custom(self, engine, vs_custom): ids = [str(uuid.uuid4()) for _ in range(len(texts))] 
vs_custom._add_embeddings(texts, embeddings, metadatas, ids) @@ -179,4 +221,18 @@ def test_add_embedding_custom(self, engine, vs_custom): assert len(results) == 3 engine._execute(f"TRUNCATE TABLE `{CUSTOM_TABLE}`") + def test_delete_custom(self, engine, vs_custom): + ids = [str(uuid.uuid4()) for _ in range(len(texts))] + vs_custom.add_texts(texts, ids=ids) + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + content = [result["mycontent"] for result in results] + assert len(results) == 3 + assert "foo" in content + # delete an ID + vs_custom.delete([ids[0]]) + results = engine._fetch(f"SELECT * FROM `{CUSTOM_TABLE}`") + content = [result["mycontent"] for result in results] + assert len(results) == 2 + assert "foo" not in content + # Need tests for store metadata=False From 9cc52c188cd0015b29a7c9f146876158ed39132f Mon Sep 17 00:00:00 2001 From: Averi Kitsch Date: Wed, 27 Mar 2024 11:07:59 -0700 Subject: [PATCH 16/23] chore: update kokoro deps (#54) --- .kokoro/requirements.txt | 65 +++++++++++++++------------------------- 1 file changed, 24 insertions(+), 41 deletions(-) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8db52d8..8069e8e 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -84,9 +84,9 @@ charset-normalizer==2.1.1 \ # via # -r requirements.in # requests -click==8.1.7 \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via # -r requirements.in # gcp-docuploader @@ -244,7 +244,9 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media + # via + # google-cloud-storage + # google-resumable-media google-resumable-media==2.7.0 \ --hash=sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b \ --hash=sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08 @@ -265,15 +267,13 @@ importlib-metadata==4.13.0 \ # keyring # twine jaraco-classes==3.3.1 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a + --hash=sha256:86b534de565381f6b3c1c830d13f931d7be1a75f0081c57dff615578676e2206 \ + --hash=sha256:cb28a5ebda8bc47d8c8015307d93163464f9f2b91ab4006e09ff0ce07e8bfb30 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage + # via secretstorage jinja2==3.1.3 \ --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 @@ -288,7 +288,7 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -MarkupSafe==2.1.5 \ +markupsafe==2.1.5 \ --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ 
--hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ @@ -395,29 +395,18 @@ platformdirs==4.2.0 \ --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -443,7 +432,7 @@ pygments==2.17.2 \ # via # readme-renderer # rich -PyJWT==2.8.0 \ +pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool @@ -490,15 +479,12 @@ rsa==4.9 \ secretstorage==3.3.3 \ 
--hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via - # -r requirements.in - # keyring + # via -r requirements.in six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via # gcp-docuploader - # google-auth # python-dateutil tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ @@ -535,6 +521,3 @@ zipp==3.18.1 \ # pinned when the requirements file includes hashes and the requirement is not # satisfied by a package already installed. Consider using the --allow-unsafe flag. # setuptools -jaraco.classes==3.3.1 \ - --hash=sha256:86b534de565381f6b3c1c830d13f931d7be1a75f0081c57dff615578676e2206 \ - --hash=sha256:cb28a5ebda8bc47d8c8015307d93163464f9f2b91ab4006e09ff0ce07e8bfb30 From 2e30b48ad2d1fb11f5f8964808ed5143d9231084 Mon Sep 17 00:00:00 2001 From: totoleon Date: Thu, 28 Mar 2024 11:15:02 -0700 Subject: [PATCH 17/23] feat: add index types for vector search (#55) * feat: adding index operations and tests --- integration.cloudbuild.yaml | 2 +- .../__init__.py | 6 + .../engine.py | 8 +- .../indexes.py | 88 ++++++++- .../vectorstore.py | 73 ++++++- .../test_mysql_vectorstore_index.py | 180 ++++++++++++++++++ 6 files changed, 349 insertions(+), 8 deletions(-) create mode 100644 tests/integration/test_mysql_vectorstore_index.py diff --git a/integration.cloudbuild.yaml b/integration.cloudbuild.yaml index fae720a..61e569b 100644 --- a/integration.cloudbuild.yaml +++ b/integration.cloudbuild.yaml @@ -43,7 +43,7 @@ availableSecrets: env: "DB_PASSWORD" substitutions: - _INSTANCE_ID: mysql-vector + _INSTANCE_ID: test-instance _REGION: us-central1 _DB_NAME: test _VERSION: "3.8" diff --git a/src/langchain_google_cloud_sql_mysql/__init__.py b/src/langchain_google_cloud_sql_mysql/__init__.py index 29d2540..b59cf2d 100644 --- a/src/langchain_google_cloud_sql_mysql/__init__.py +++ b/src/langchain_google_cloud_sql_mysql/__init__.py @@ -14,16 +14,22 @@ from .chat_message_history import MySQLChatMessageHistory from .engine import Column, MySQLEngine +from .indexes import DistanceMeasure, IndexType, QueryOptions, SearchType, VectorIndex from .loader import MySQLDocumentSaver, MySQLLoader from .vectorstore import MySQLVectorStore from .version import __version__ __all__ = [ "Column", + "DistanceMeasure", + "IndexType", "MySQLChatMessageHistory", "MySQLDocumentSaver", "MySQLEngine", "MySQLLoader", "MySQLVectorStore", + "QueryOptions", + "SearchType", + "VectorIndex", "__version__", ] diff --git a/src/langchain_google_cloud_sql_mysql/engine.py b/src/langchain_google_cloud_sql_mysql/engine.py index a410748..9dd7375 100644 --- a/src/langchain_google_cloud_sql_mysql/engine.py +++ b/src/langchain_google_cloud_sql_mysql/engine.py @@ -222,11 +222,17 @@ def connect(self) -> sqlalchemy.engine.Connection: return self.engine.connect() def _execute(self, query: str, params: Optional[dict] = None) -> None: - """Execute a SQL query.""" + """Executes a SQL query within a transaction.""" with self.engine.connect() as conn: conn.execute(sqlalchemy.text(query), params) conn.commit() + def _execute_outside_tx(self, query: str, params: Optional[dict] = None) -> None: + """Executes a SQL query with autocommit (outside of transaction).""" + with self.engine.connect() as conn: + conn = conn.execution_options(isolation_level="AUTOCOMMIT") + 
conn.execute(sqlalchemy.text(query), params) + def _fetch(self, query: str, params: Optional[dict] = None): """Fetch results from a SQL query.""" with self.engine.connect() as conn: diff --git a/src/langchain_google_cloud_sql_mysql/indexes.py b/src/langchain_google_cloud_sql_mysql/indexes.py index d038abb..e7a6fdc 100644 --- a/src/langchain_google_cloud_sql_mysql/indexes.py +++ b/src/langchain_google_cloud_sql_mysql/indexes.py @@ -12,11 +12,91 @@ # See the License for the specific language governing permissions and # limitations under the License. -from abc import ABC from dataclasses import dataclass +from enum import Enum +from typing import Optional + + +class SearchType(Enum): + """Defines the types of search algorithms that can be used. + + Attributes: + KNN: K-Nearest Neighbors search. + ANN: Approximate Nearest Neighbors search. + """ + + KNN = "KNN" + ANN = "ANN" @dataclass -class QueryOptions(ABC): - def to_string(self) -> str: - raise NotImplementedError("to_string method must be implemented by subclass") +class QueryOptions: + """Holds configuration options for executing a search query. + + Attributes: + num_partitions (Optional[int]): The number of partitions to divide the search space into. None means default partitioning. + num_neighbors (Optional[int]): The number of nearest neighbors to retrieve. None means use the default. + search_type (SearchType): The type of search algorithm to use. Defaults to KNN. + """ + + num_partitions: Optional[int] = None + num_neighbors: Optional[int] = None + search_type: SearchType = SearchType.KNN + + +DEFAULT_QUERY_OPTIONS = QueryOptions() + + +class IndexType(Enum): + """Defines the types of indexes that can be used for vector storage. + + Attributes: + BRUTE_FORCE_SCAN: A simple brute force scan approach. + TREE_AH: A tree-based index, specifically Annoy (Approximate Nearest Neighbors Oh Yeah). + TREE_SQ: A tree-based index, specifically ScaNN (Scalable Nearest Neighbors). + """ + + BRUTE_FORCE_SCAN = "BRUTE_FORCE" + TREE_AH = "TREE_AH" + TREE_SQ = "TREE_SQ" + + +class DistanceMeasure(Enum): + """Enumerates the types of distance measures that can be used in searches. + + Attributes: + COSINE: Cosine similarity measure. + SQUARED_L2: Squared L2 norm (Euclidean) distance. + DOT_PRODUCT: Dot product similarity. + """ + + COSINE = "cosine" + SQUARED_L2 = "squared_l2" + DOT_PRODUCT = "dot_product" + + +class VectorIndex: + """Represents a vector index for storing and querying vectors. + + Attributes: + name (Optional[str]): The name of the index. + index_type (Optional[IndexType]): The type of index. + distance_measure (Optional[DistanceMeasure]): The distance measure to use for the index. + num_partitions (Optional[int]): The number of partitions for the index. None for default. + num_neighbors (Optional[int]): The default number of neighbors to return for queries. 
+ """ + + def __init__( + self, + name: Optional[str] = None, + index_type: Optional[IndexType] = None, + distance_measure: Optional[DistanceMeasure] = None, + num_partitions: Optional[int] = None, + num_neighbors: Optional[int] = None, + ): + """Initializes a new instance of the VectorIndex class.""" + self.name = name + self.index_type = index_type + self.distance_measure = distance_measure + self.num_partitions = num_partitions + self.num_neighbors = num_neighbors diff --git a/src/langchain_google_cloud_sql_mysql/vectorstore.py b/src/langchain_google_cloud_sql_mysql/vectorstore.py index 31d9af5..c5f363e 100644 --- a/src/langchain_google_cloud_sql_mysql/vectorstore.py +++ b/src/langchain_google_cloud_sql_mysql/vectorstore.py @@ -23,7 +23,9 @@ from langchain_core.vectorstores import VectorStore from .engine import MySQLEngine -from .indexes import QueryOptions +from .indexes import DEFAULT_QUERY_OPTIONS, QueryOptions, SearchType, VectorIndex + +DEFAULT_INDEX_NAME_SUFFIX = "langchainvectorindex" class MySQLVectorStore(VectorStore): @@ -38,7 +40,7 @@ def __init__( ignore_metadata_columns: Optional[List[str]] = None, id_column: str = "langchain_id", metadata_json_column: Optional[str] = "langchain_metadata", - query_options: Optional[QueryOptions] = None, + query_options: QueryOptions = DEFAULT_QUERY_OPTIONS, ): """Constructor for MySQLVectorStore. Args: @@ -118,11 +120,16 @@ def __init__( self.id_column = id_column self.metadata_json_column = metadata_json_column self.query_options = query_options + self.db_name = self.__get_db_name() @property def embeddings(self) -> Embeddings: return self.embedding_service + def __get_db_name(self) -> str: + result = self.engine._fetch("SELECT DATABASE();") + return result[0]["DATABASE()"] + def _add_embeddings( self, texts: Iterable[str], @@ -210,6 +217,64 @@ def delete( self.engine._execute(query) return True + def apply_vector_index(self, vector_index: VectorIndex): + # Construct the default index name + if not vector_index.name: + vector_index.name = f"{self.table_name}_{DEFAULT_INDEX_NAME_SUFFIX}" + query_template = f"CALL mysql.create_vector_index('{vector_index.name}', '{self.db_name}.{self.table_name}', '{self.embedding_column}', '{{}}');" + self.__exec_apply_vector_index(query_template, vector_index) + # After applying an index to the table, set the query option search type to be ANN + self.query_options.search_type = SearchType.ANN + + def alter_vector_index(self, vector_index: VectorIndex): + existing_index_name = self._get_vector_index_name() + if not existing_index_name: + raise ValueError("No existing vector index found.") + if not vector_index.name: + vector_index.name = existing_index_name.split(".")[1] + if existing_index_name.split(".")[1] != vector_index.name: + raise ValueError( + f"Existing index name {existing_index_name} does not match the new index name {vector_index.name}." 
+ ) + query_template = ( + f"CALL mysql.alter_vector_index('{existing_index_name}', '{{}}');" + ) + self.__exec_apply_vector_index(query_template, vector_index) + + def __exec_apply_vector_index(self, query_template: str, vector_index: VectorIndex): + index_options = [] + if vector_index.index_type: + index_options.append(f"index_type={vector_index.index_type.value}") + if vector_index.distance_measure: + index_options.append( + f"distance_measure={vector_index.distance_measure.value}" + ) + if vector_index.num_partitions: + index_options.append(f"num_partitions={vector_index.num_partitions}") + if vector_index.num_neighbors: + index_options.append(f"num_neighbors={vector_index.num_neighbors}") + index_options_query = ",".join(index_options) + + stmt = query_template.format(index_options_query) + self.engine._execute_outside_tx(stmt) + + def _get_vector_index_name(self): + query = f"SELECT index_name FROM mysql.vector_indexes WHERE table_name='{self.db_name}.{self.table_name}';" + result = self.engine._fetch(query) + if result: + return result[0]["index_name"] + else: + return None + + def drop_vector_index(self): + existing_index_name = self._get_vector_index_name() + if existing_index_name: + self.engine._execute_outside_tx( + f"CALL mysql.drop_vector_index('{existing_index_name}');" + ) + self.query_options.search_type = SearchType.KNN + return existing_index_name + @classmethod def from_texts( # type: ignore[override] cls: Type[MySQLVectorStore], @@ -225,6 +290,7 @@ def from_texts( # type: ignore[override] ignore_metadata_columns: Optional[List[str]] = None, id_column: str = "langchain_id", metadata_json_column: str = "langchain_metadata", + query_options: QueryOptions = DEFAULT_QUERY_OPTIONS, **kwargs: Any, ): vs = cls( @@ -237,6 +303,7 @@ def from_texts( # type: ignore[override] ignore_metadata_columns=ignore_metadata_columns, id_column=id_column, metadata_json_column=metadata_json_column, + query_options=query_options, ) vs.add_texts(texts, metadatas=metadatas, ids=ids, **kwargs) return vs @@ -255,6 +322,7 @@ def from_documents( # type: ignore[override] ignore_metadata_columns: Optional[List[str]] = None, id_column: str = "langchain_id", metadata_json_column: str = "langchain_metadata", + query_options: QueryOptions = DEFAULT_QUERY_OPTIONS, **kwargs: Any, ) -> MySQLVectorStore: vs = cls( @@ -267,6 +335,7 @@ def from_documents( # type: ignore[override] ignore_metadata_columns=ignore_metadata_columns, id_column=id_column, metadata_json_column=metadata_json_column, + query_options=query_options, ) texts = [doc.page_content for doc in documents] metadatas = [doc.metadata for doc in documents] diff --git a/tests/integration/test_mysql_vectorstore_index.py b/tests/integration/test_mysql_vectorstore_index.py new file mode 100644 index 0000000..f2eb302 --- /dev/null +++ b/tests/integration/test_mysql_vectorstore_index.py @@ -0,0 +1,180 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +import pytest +from langchain_community.embeddings import DeterministicFakeEmbedding +from langchain_core.documents import Document + +from langchain_google_cloud_sql_mysql import ( + DistanceMeasure, + IndexType, + MySQLEngine, + MySQLVectorStore, + SearchType, + VectorIndex, +) + +DEFAULT_TABLE = "test_table_" + str(uuid.uuid4()).split("-")[0] +TABLE_1000_ROWS = "test_table_1000_rows" +VECTOR_SIZE = 8 + +embeddings_service = DeterministicFakeEmbedding(size=VECTOR_SIZE) + + +def get_env_var(key: str, desc: str) -> str: + v = os.environ.get(key) + if v is None: + raise ValueError(f"Must set env var {key} to: {desc}") + return v + + +class TestVectorStoreFromMethods: + @pytest.fixture(scope="module") + def db_project(self) -> str: + return get_env_var("PROJECT_ID", "project id for google cloud") + + @pytest.fixture(scope="module") + def db_region(self) -> str: + return get_env_var("REGION", "region for cloud sql instance") + + @pytest.fixture(scope="module") + def db_instance(self) -> str: + return get_env_var("INSTANCE_ID", "instance for cloud sql") + + @pytest.fixture(scope="module") + def db_name(self) -> str: + return get_env_var("DB_NAME", "database name on cloud sql instance") + + @pytest.fixture(scope="class") + def engine(self, db_project, db_region, db_instance, db_name): + engine = MySQLEngine.from_instance( + project_id=db_project, + instance=db_instance, + region=db_region, + database=db_name, + ) + yield engine + + @pytest.fixture(scope="class") + def vs(self, engine): + engine.init_vectorstore_table( + DEFAULT_TABLE, + VECTOR_SIZE, + overwrite_existing=True, + ) + vs = MySQLVectorStore( + engine, + embedding_service=embeddings_service, + table_name=DEFAULT_TABLE, + ) + yield vs + vs.drop_vector_index() + engine._execute(f"DROP TABLE IF EXISTS `{DEFAULT_TABLE}`") + + @pytest.fixture(scope="class") + def vs_1000(self, engine): + result = engine._fetch("SHOW TABLES") + tables = [list(r.values())[0] for r in result] + if TABLE_1000_ROWS not in tables: + engine.init_vectorstore_table( + TABLE_1000_ROWS, + VECTOR_SIZE, + ) + vs_1000 = MySQLVectorStore( + engine, + embedding_service=embeddings_service, + table_name=TABLE_1000_ROWS, + ) + row_count = vs_1000.engine._fetch(f"SELECT count(*) FROM `{TABLE_1000_ROWS}`")[ + 0 + ]["count(*)"] + # Add 1000 rows of data if the number of rows is less than 1000 + if row_count < 1000: + texts_1000 = [ + f"{text}_{i}" + for text in ["apple", "dog", "basketball", "coffee"] + for i in range(1, 251) + ] + ids = [str(uuid.uuid4()) for _ in range(len(texts_1000))] + vs_1000.add_texts(texts_1000, ids=ids) + vs_1000.drop_vector_index() + yield vs_1000 + vs_1000.drop_vector_index() + + def test_create_and_drop_index(self, vs): + vs.apply_vector_index(VectorIndex()) + assert ( + vs._get_vector_index_name() + == f"{vs.db_name}.{vs.table_name}_langchainvectorindex" + ) + assert vs.query_options.search_type == SearchType.ANN + vs.drop_vector_index() + assert vs._get_vector_index_name() is None + assert vs.query_options.search_type == SearchType.KNN + + def test_update_index(self, vs): + vs.apply_vector_index(VectorIndex()) + assert ( + vs._get_vector_index_name() + == f"{vs.db_name}.{vs.table_name}_langchainvectorindex" + ) + assert vs.query_options.search_type == SearchType.ANN + vs.alter_vector_index( + VectorIndex( + index_type=IndexType.BRUTE_FORCE_SCAN, + distance_measure=DistanceMeasure.SQUARED_L2, + num_neighbors=10, + ) + ) + assert ( + vs._get_vector_index_name() + == f"{vs.db_name}.{vs.table_name}_langchainvectorindex" 
+ ) + vs.drop_vector_index() + assert vs.query_options.search_type == SearchType.KNN + + def test_create_and_drop_index_tree_sq(self, vs_1000): + vs_1000.apply_vector_index( + VectorIndex( + name="tree_sq", + index_type=IndexType.TREE_SQ, + distance_measure=DistanceMeasure.SQUARED_L2, + num_partitions=1, + num_neighbors=5, + ) + ) + assert vs_1000._get_vector_index_name() == f"{vs_1000.db_name}.tree_sq" + assert vs_1000.query_options.search_type == SearchType.ANN + vs_1000.drop_vector_index() + assert vs_1000._get_vector_index_name() is None + assert vs_1000.query_options.search_type == SearchType.KNN + + def test_create_and_drop_index_tree_ah(self, vs_1000): + vs_1000.apply_vector_index( + VectorIndex( + name="tree_ah", + index_type=IndexType.TREE_AH, + distance_measure=DistanceMeasure.COSINE, + num_partitions=2, + num_neighbors=10, + ) + ) + assert vs_1000._get_vector_index_name() == f"{vs_1000.db_name}.tree_ah" + assert vs_1000.query_options.search_type == SearchType.ANN + vs_1000.drop_vector_index() + assert vs_1000._get_vector_index_name() is None + assert vs_1000.query_options.search_type == SearchType.KNN From 5b806947e5c827ebca553a68ff74a14c7d22a6a5 Mon Sep 17 00:00:00 2001 From: totoleon Date: Mon, 1 Apr 2024 11:09:23 -0700 Subject: [PATCH 18/23] feat: adding search functions and tests (#56) --- integration.cloudbuild.yaml | 8 +- pyproject.toml | 1 + requirements-3.8.txt | 5 + requirements.txt | 1 + .../engine.py | 7 + .../indexes.py | 33 +- .../loader.py | 2 +- .../vectorstore.py | 407 +++++++++++++++++- .../test_mysql_vectorstore_index.py | 4 +- .../test_mysql_vectorstore_search.py | 241 +++++++++++ 10 files changed, 684 insertions(+), 25 deletions(-) create mode 100644 requirements-3.8.txt create mode 100644 tests/integration/test_mysql_vectorstore_search.py diff --git a/integration.cloudbuild.yaml b/integration.cloudbuild.yaml index 61e569b..a40e367 100644 --- a/integration.cloudbuild.yaml +++ b/integration.cloudbuild.yaml @@ -15,8 +15,12 @@ steps: - id: Install dependencies name: python:${_VERSION} - entrypoint: pip - args: ["install", "--user", "-r", "requirements.txt"] + entrypoint: /bin/bash + args: + - -c + - | + if [[ $_VERSION == "3.8" ]]; then version="-3.8"; fi + pip install --user -r requirements${version}.txt - id: Install module (and test requirements) name: python:${_VERSION} diff --git a/pyproject.toml b/pyproject.toml index 56918e9..4bb6bb2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,6 +11,7 @@ authors = [ dependencies = [ "langchain-core>=0.1.1, <1.0.0", "langchain-community>=0.0.18, <1.0.0", + "numpy>=1.24.4, <2.0.0", "SQLAlchemy>=2.0.7, <3.0.0", "cloud-sql-python-connector[pymysql]>=1.7.0, <2.0.0" ] diff --git a/requirements-3.8.txt b/requirements-3.8.txt new file mode 100644 index 0000000..75bf3a8 --- /dev/null +++ b/requirements-3.8.txt @@ -0,0 +1,5 @@ +langchain==0.1.12 +langchain-community==0.0.28 +numpy==1.24.4 +SQLAlchemy==2.0.28 +cloud-sql-python-connector[pymysql]==1.8.0 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index ae62b02..38412ba 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ langchain==0.1.12 langchain-community==0.0.28 +numpy==1.26.4 SQLAlchemy==2.0.28 cloud-sql-python-connector[pymysql]==1.8.0 diff --git a/src/langchain_google_cloud_sql_mysql/engine.py b/src/langchain_google_cloud_sql_mysql/engine.py index 9dd7375..fbde944 100644 --- a/src/langchain_google_cloud_sql_mysql/engine.py +++ b/src/langchain_google_cloud_sql_mysql/engine.py @@ -241,6 +241,13 @@ def _fetch(self, 
query: str, params: Optional[dict] = None): result_fetch = result_map.fetchall() return result_fetch + def _fetch_rows(self, query: str, params: Optional[dict] = None): + """Fetch results from a SQL query as rows.""" + with self.engine.connect() as conn: + result = conn.execute(sqlalchemy.text(query), params) + result_fetch = result.fetchall() # Directly fetch rows + return result_fetch + def init_chat_history_table(self, table_name: str) -> None: """Create table with schema required for MySQLChatMessageHistory class. diff --git a/src/langchain_google_cloud_sql_mysql/indexes.py b/src/langchain_google_cloud_sql_mysql/indexes.py index e7a6fdc..81dbf7a 100644 --- a/src/langchain_google_cloud_sql_mysql/indexes.py +++ b/src/langchain_google_cloud_sql_mysql/indexes.py @@ -29,18 +29,33 @@ class SearchType(Enum): ANN = "ANN" +class DistanceMeasure(Enum): + """Enumerates the types of distance measures that can be used in searches. + + Attributes: + COSINE: Cosine similarity measure. + L2_SQUARED: Squared L2 norm (Euclidean) distance. + DOT_PRODUCT: Dot product similarity. + """ + + COSINE = "cosine" + L2_SQUARED = "l2_squared" + DOT_PRODUCT = "dot_product" + + @dataclass class QueryOptions: """Holds configuration options for executing a search query. Attributes: num_partitions (Optional[int]): The number of partitions to divide the search space into. None means default partitioning. - num_neighbors (Optional[int]): The number of nearest neighbors to retrieve. None means use the default. + num_neighbors (int): The number of nearest neighbors to retrieve. Default to 10. search_type (SearchType): The type of search algorithm to use. Defaults to KNN. """ num_partitions: Optional[int] = None - num_neighbors: Optional[int] = None + num_neighbors: int = 10 + distance_measure: DistanceMeasure = DistanceMeasure.L2_SQUARED search_type: SearchType = SearchType.KNN @@ -61,20 +76,6 @@ class IndexType(Enum): TREE_SQ = "TREE_SQ" -class DistanceMeasure(Enum): - """Enumerates the types of distance measures that can be used in searches. - - Attributes: - COSINE: Cosine similarity measure. - SQUARED_L2: Squared L2 norm (Euclidean) distance. - DOT_PRODUCT: Dot product similarity. - """ - - COSINE = "cosine" - SQUARED_L2 = "squared_l2" - DOT_PRODUCT = "dot_product" - - class VectorIndex: """Represents a vector index for storing and querying vectors. 
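As context for the `QueryOptions`, `DistanceMeasure`, and `SearchType` definitions in the hunk above, here is a minimal sketch of how the new per-query options could be combined once this patch lands. The partition/neighbor counts and the `vectorstore` instance below are illustrative placeholders, not values taken from the patch; only the class names and fields come from the code above.

```python
from langchain_google_cloud_sql_mysql import (
    DistanceMeasure,
    QueryOptions,
    SearchType,
)

# Illustrative only: request the 20 nearest neighbours across 2 partitions
# using ANN with cosine distance. Per the patched _query_collection,
# num_partitions is rejected for KNN, so it only makes sense with ANN.
ann_options = QueryOptions(
    num_partitions=2,
    num_neighbors=20,
    distance_measure=DistanceMeasure.COSINE,
    search_type=SearchType.ANN,
)

# Hypothetical usage: `vectorstore` stands for an existing MySQLVectorStore
# whose table already has a vector index applied. The options can be set as
# the store-wide default (query_options constructor argument) or per call:
# docs = vectorstore.similarity_search("coffee", k=5, query_options=ann_options)
```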
diff --git a/src/langchain_google_cloud_sql_mysql/loader.py b/src/langchain_google_cloud_sql_mysql/loader.py index 26bfaaa..89e0ff1 100644 --- a/src/langchain_google_cloud_sql_mysql/loader.py +++ b/src/langchain_google_cloud_sql_mysql/loader.py @@ -29,7 +29,7 @@ def _parse_doc_from_row( content_columns: Iterable[str], metadata_columns: Iterable[str], row: Dict, - metadata_json_column: str = DEFAULT_METADATA_COL, + metadata_json_column: Optional[str] = DEFAULT_METADATA_COL, ) -> Document: page_content = " ".join( str(row[column]) for column in content_columns if column in row diff --git a/src/langchain_google_cloud_sql_mysql/vectorstore.py b/src/langchain_google_cloud_sql_mysql/vectorstore.py index c5f363e..6f44d48 100644 --- a/src/langchain_google_cloud_sql_mysql/vectorstore.py +++ b/src/langchain_google_cloud_sql_mysql/vectorstore.py @@ -16,14 +16,22 @@ from __future__ import annotations import json -from typing import Any, Iterable, List, Optional, Type +from typing import Any, Iterable, List, Optional, Tuple, Type, Union +import numpy as np from langchain_core.documents import Document from langchain_core.embeddings import Embeddings from langchain_core.vectorstores import VectorStore from .engine import MySQLEngine -from .indexes import DEFAULT_QUERY_OPTIONS, QueryOptions, SearchType, VectorIndex +from .indexes import ( + DEFAULT_QUERY_OPTIONS, + DistanceMeasure, + QueryOptions, + SearchType, + VectorIndex, +) +from .loader import _parse_doc_from_row DEFAULT_INDEX_NAME_SUFFIX = "langchainvectorindex" @@ -41,6 +49,9 @@ def __init__( id_column: str = "langchain_id", metadata_json_column: Optional[str] = "langchain_metadata", query_options: QueryOptions = DEFAULT_QUERY_OPTIONS, + k: int = 4, + fetch_k: int = 20, + lambda_mult: float = 0.5, ): """Constructor for MySQLVectorStore. Args: @@ -120,6 +131,9 @@ def __init__( self.id_column = id_column self.metadata_json_column = metadata_json_column self.query_options = query_options + self.k = k + self.fetch_k = fetch_k + self.lambda_mult = lambda_mult self.db_name = self.__get_db_name() @property @@ -130,6 +144,12 @@ def __get_db_name(self) -> str: result = self.engine._fetch("SELECT DATABASE();") return result[0]["DATABASE()"] + def __get_column_names(self) -> List[str]: + results = self.engine._fetch( + f"SELECT COLUMN_NAME FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = '{self.db_name}' AND `TABLE_NAME` = '{self.table_name}'" + ) + return [r["COLUMN_NAME"] for r in results] + def _add_embeddings( self, texts: Iterable[str], @@ -348,5 +368,384 @@ def similarity_search( k: Optional[int] = None, filter: Optional[str] = None, **kwargs: Any, - ): - raise NotImplementedError + ) -> List[Document]: + """Searches for similar documents based on a text query. + + Args: + query: The text query to search for. + k: The number of similar documents to return. + filter: A filter expression to apply to the search results. + **kwargs: Additional keyword arguments to pass to the search function. + + Returns: + A list of similar documents. + """ + embedding = self.embedding_service.embed_query(query) + docs = self.similarity_search_by_vector( + embedding=embedding, k=k, filter=filter, **kwargs + ) + return docs + + def similarity_search_by_vector( + self, + embedding: List[float], + k: Optional[int] = None, + filter: Optional[str] = None, + query_options: Optional[QueryOptions] = None, + **kwargs: Any, + ) -> List[Document]: + """Searches for similar documents based on a vector embedding. + + Args: + embedding: The vector embedding to search for. 
+ k: The number of similar documents to return. + filter: A filter expression to apply to the search results. + query_options: Additional query options. + **kwargs: Additional keyword arguments to pass to the search function. + + Returns: + A list of similar documents. + """ + docs_and_scores = self.similarity_search_with_score_by_vector( + embedding=embedding, + k=k, + filter=filter, + query_options=query_options, + **kwargs, + ) + + return [doc for doc, _ in docs_and_scores] + + def similarity_search_with_score( + self, + query: str, + k: Optional[int] = None, + filter: Optional[str] = None, + query_options: Optional[QueryOptions] = None, + **kwargs: Any, + ) -> List[Tuple[Document, float]]: + """Searches for similar documents based on a text query and returns their scores. + + Args: + query: The text query to search for. + k: The number of similar documents to return. + filter: A filter expression to apply to the search results. + query_options: Additional query options. + **kwargs: Additional keyword arguments to pass to the search function. + + Returns: + A list of tuples, where each tuple contains a document and its similarity score. + """ + embedding = self.embedding_service.embed_query(query) + docs_with_scores = self.similarity_search_with_score_by_vector( + embedding=embedding, + k=k, + filter=filter, + query_options=query_options, + **kwargs, + ) + return docs_with_scores + + def similarity_search_with_score_by_vector( + self, + embedding: List[float], + k: Optional[int] = None, + filter: Optional[str] = None, + query_options: Optional[QueryOptions] = None, + **kwargs: Any, + ) -> List[Tuple[Document, float]]: + """Searches for similar documents based on a vector embedding and returns their scores. + + Args: + embedding: The vector embedding to search for. + k: The number of similar documents to return. + filter: A filter expression to apply to the search results. + query_options: Additional query options. + **kwargs: Additional keyword arguments to pass to the search function. + + Returns: + A list of tuples, where each tuple contains a document and its similarity score. + """ + results = self._query_collection( + embedding=embedding, + k=k, + filter=filter, + map_results=False, + query_options=query_options, + **kwargs, + ) + + documents_with_scores = [] + + for row in results: + row = row._asdict() + if row.get(self.metadata_json_column): + row[self.metadata_json_column] = json.loads( + row[self.metadata_json_column] + ) + document = _parse_doc_from_row( + content_columns=[self.content_column], + metadata_columns=self.metadata_columns, + row=row, + metadata_json_column=self.metadata_json_column, + ) + + documents_with_scores.append( + ( + document, + row["distance"], + ) + ) + + return documents_with_scores + + def max_marginal_relevance_search( + self, + query: str, + k: Optional[int] = None, + fetch_k: Optional[int] = None, + lambda_mult: Optional[float] = None, + filter: Optional[str] = None, + query_options: Optional[QueryOptions] = None, + **kwargs: Any, + ) -> List[Document]: + """Performs Maximal Marginal Relevance (MMR) search based on a text query. + + Args: + query: The text query to search for. + k: The number of documents to return. + fetch_k: The number of documents to initially retrieve. + lambda_mult: The weight for balancing relevance and diversity. + filter: A filter expression to apply to the search results. + query_options: Additional query options. + **kwargs: Additional keyword arguments to pass to the search function. 
+ + Returns: + A list of documents selected using MMR. + """ + embedding = self.embedding_service.embed_query(text=query) + + return self.max_marginal_relevance_search_by_vector( + embedding=embedding, + k=k, + fetch_k=fetch_k, + lambda_mult=lambda_mult, + filter=filter, + query_options=query_options, + **kwargs, + ) + + def max_marginal_relevance_search_by_vector( + self, + embedding: List[float], + k: Optional[int] = None, + fetch_k: Optional[int] = None, + lambda_mult: Optional[float] = None, + filter: Optional[str] = None, + query_options: Optional[QueryOptions] = None, + **kwargs: Any, + ) -> List[Document]: + """Performs Maximal Marginal Relevance (MMR) search based on a vector embedding. + + Args: + embedding: The vector embedding to search for. + k: The number of documents to return. + fetch_k: The number of documents to initially retrieve. + lambda_mult: The weight for balancing relevance and diversity. + filter: A filter expression to apply to the search results. + query_options: Additional query options. + **kwargs: Additional keyword arguments to pass to the search function. + + Returns: + A list of documents selected using MMR. + """ + docs_and_scores = self.max_marginal_relevance_search_with_score_by_vector( + embedding, + k=k, + fetch_k=fetch_k, + lambda_mult=lambda_mult, + filter=filter, + query_options=query_options, + **kwargs, + ) + + return [result[0] for result in docs_and_scores] + + def max_marginal_relevance_search_with_score_by_vector( + self, + embedding: List[float], + k: Optional[int] = None, + fetch_k: Optional[int] = None, + lambda_mult: Optional[float] = None, + filter: Optional[str] = None, + query_options: Optional[QueryOptions] = None, + **kwargs: Any, + ) -> List[Tuple[Document, float]]: + """Performs Maximal Marginal Relevance (MMR) search based on a vector embedding and returns documents with scores. + + Args: + embedding: The vector embedding to search for. + k: The number of documents to return. + fetch_k: The number of documents to initially retrieve. + lambda_mult: The weight for balancing relevance and diversity. + filter: A filter expression to apply to the search results. + query_options: Additional query options. + **kwargs: Additional keyword arguments to pass to the search function. + + Returns: + A list of tuples, where each tuple contains a document and its similarity score, selected using MMR. 
+ """ + results = self._query_collection( + embedding=embedding, + k=fetch_k, + filter=filter, + map_results=False, + query_options=query_options, + **kwargs, + ) + results = [row._asdict() for row in results] + + k = k if k else self.k + fetch_k = fetch_k if fetch_k else self.fetch_k + lambda_mult = lambda_mult if lambda_mult else self.lambda_mult + embedding_list = [json.loads(row[self.embedding_column]) for row in results] + mmr_selected = maximal_marginal_relevance( + np.array(embedding, dtype=np.float32), + embedding_list, + k=k, + lambda_mult=lambda_mult, + ) + + documents_with_scores = [] + for row in results: + if row.get(self.metadata_json_column): + row[self.metadata_json_column] = json.loads( + row[self.metadata_json_column] + ) + document = _parse_doc_from_row( + content_columns=[self.content_column], + metadata_columns=self.metadata_columns, + row=row, + metadata_json_column=self.metadata_json_column, + ) + + documents_with_scores.append( + ( + document, + row["distance"], + ) + ) + + return [r for i, r in enumerate(documents_with_scores) if i in mmr_selected] + + def _query_collection( + self, + embedding: List[float], + k: Optional[int] = None, + filter: Optional[str] = None, + query_options: Optional[QueryOptions] = None, + map_results: Optional[bool] = True, + ) -> List[Any]: + column_names = self.__get_column_names() + # Apply vector_to_string to the embedding_column + for i, v in enumerate(column_names): + if v == self.embedding_column: + column_names[i] = f"vector_to_string({v}) as {self.embedding_column}" + column_query = ", ".join(column_names) + query_options = query_options if query_options else self.query_options + if query_options.num_partitions and query_options.search_type == SearchType.KNN: + raise ValueError("num_partitions is not supported for the search type KNN") + + k = k if k else query_options.num_neighbors + distance_function = ( + f"{query_options.distance_measure.value}_distance" + if query_options.distance_measure != DistanceMeasure.DOT_PRODUCT + else query_options.distance_measure.value + ) + if query_options.search_type == SearchType.KNN: + filter = f"WHERE {filter}" if filter else "" + stmt = f"SELECT {column_query}, {distance_function}({self.embedding_column}, string_to_vector('{embedding}')) AS distance FROM {self.table_name} {filter} ORDER BY distance LIMIT {k};" + else: + filter = f"AND {filter}" if filter else "" + num_partitions = ( + f",num_partitions={query_options.num_partitions}" + if query_options.num_partitions + else "" + ) + stmt = f"SELECT {column_query}, {distance_function}({self.embedding_column}, string_to_vector('{embedding}')) AS distance FROM {self.table_name} WHERE NEAREST({self.embedding_column}) TO (string_to_vector('{embedding}'), 'num_neighbors={k}{num_partitions}') {filter} ORDER BY distance;" + + # return self.engine._fetch(stmt) + if map_results: + return self.engine._fetch(stmt) + else: + return self.engine._fetch_rows(stmt) + + +### The following is copied from langchain-community until it's moved into core + +Matrix = Union[List[List[float]], List[np.ndarray], np.ndarray] + + +def maximal_marginal_relevance( + query_embedding: np.ndarray, + embedding_list: list, + lambda_mult: float = 0.5, + k: int = 4, +) -> List[int]: + """Calculate maximal marginal relevance.""" + if min(k, len(embedding_list)) <= 0: + return [] + if query_embedding.ndim == 1: + query_embedding = np.expand_dims(query_embedding, axis=0) + similarity_to_query = cosine_similarity(query_embedding, embedding_list)[0] + most_similar = 
int(np.argmax(similarity_to_query)) + idxs = [most_similar] + selected = np.array([embedding_list[most_similar]]) + while len(idxs) < min(k, len(embedding_list)): + best_score = -np.inf + idx_to_add = -1 + similarity_to_selected = cosine_similarity(embedding_list, selected) + for i, query_score in enumerate(similarity_to_query): + if i in idxs: + continue + redundant_score = max(similarity_to_selected[i]) + equation_score = ( + lambda_mult * query_score - (1 - lambda_mult) * redundant_score + ) + if equation_score > best_score: + best_score = equation_score + idx_to_add = i + idxs.append(idx_to_add) + selected = np.append(selected, [embedding_list[idx_to_add]], axis=0) + return idxs + + +def cosine_similarity(X: Matrix, Y: Matrix) -> np.ndarray: + """Row-wise cosine similarity between two equal-width matrices.""" + if len(X) == 0 or len(Y) == 0: + return np.array([]) + + X = np.array(X) + Y = np.array(Y) + if X.shape[1] != Y.shape[1]: + raise ValueError( + f"Number of columns in X and Y must be the same. X has shape {X.shape} " + f"and Y has shape {Y.shape}." + ) + try: + import simsimd as simd # type: ignore + + X = np.array(X, dtype=np.float32) + Y = np.array(Y, dtype=np.float32) + Z = 1 - simd.cdist(X, Y, metric="cosine") + if isinstance(Z, float): + return np.array([Z]) + return Z + except ImportError: + X_norm = np.linalg.norm(X, axis=1) + Y_norm = np.linalg.norm(Y, axis=1) + # Ignore divide by zero errors run time warnings as those are handled below. + with np.errstate(divide="ignore", invalid="ignore"): + similarity = np.dot(X, Y.T) / np.outer(X_norm, Y_norm) + similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0 + return similarity diff --git a/tests/integration/test_mysql_vectorstore_index.py b/tests/integration/test_mysql_vectorstore_index.py index f2eb302..419d57d 100644 --- a/tests/integration/test_mysql_vectorstore_index.py +++ b/tests/integration/test_mysql_vectorstore_index.py @@ -136,7 +136,7 @@ def test_update_index(self, vs): vs.alter_vector_index( VectorIndex( index_type=IndexType.BRUTE_FORCE_SCAN, - distance_measure=DistanceMeasure.SQUARED_L2, + distance_measure=DistanceMeasure.L2_SQUARED, num_neighbors=10, ) ) @@ -152,7 +152,7 @@ def test_create_and_drop_index_tree_sq(self, vs_1000): VectorIndex( name="tree_sq", index_type=IndexType.TREE_SQ, - distance_measure=DistanceMeasure.SQUARED_L2, + distance_measure=DistanceMeasure.L2_SQUARED, num_partitions=1, num_neighbors=5, ) diff --git a/tests/integration/test_mysql_vectorstore_search.py b/tests/integration/test_mysql_vectorstore_search.py new file mode 100644 index 0000000..0720191 --- /dev/null +++ b/tests/integration/test_mysql_vectorstore_search.py @@ -0,0 +1,241 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +import pytest +from langchain_community.embeddings import DeterministicFakeEmbedding + +from langchain_google_cloud_sql_mysql import ( + DistanceMeasure, + IndexType, + MySQLEngine, + MySQLVectorStore, + QueryOptions, + SearchType, + VectorIndex, +) + +TABLE_1000_ROWS = "test_table_1000_rows_search" +VECTOR_SIZE = 8 + +embeddings_service = DeterministicFakeEmbedding(size=VECTOR_SIZE) + + +def get_env_var(key: str, desc: str) -> str: + v = os.environ.get(key) + if v is None: + raise ValueError(f"Must set env var {key} to: {desc}") + return v + + +class TestVectorStoreFromMethods: + apple_100_text = "apple_100" + embeddings_service = DeterministicFakeEmbedding(size=VECTOR_SIZE) + apple_100_embedding = embeddings_service.embed_query(apple_100_text) + + @pytest.fixture(scope="module") + def db_project(self) -> str: + return get_env_var("PROJECT_ID", "project id for google cloud") + + @pytest.fixture(scope="module") + def db_region(self) -> str: + return get_env_var("REGION", "region for cloud sql instance") + + @pytest.fixture(scope="module") + def db_instance(self) -> str: + return get_env_var("INSTANCE_ID", "instance for cloud sql") + + @pytest.fixture(scope="module") + def db_name(self) -> str: + return get_env_var("DB_NAME", "database name on cloud sql instance") + + @pytest.fixture(scope="class") + def engine(self, db_project, db_region, db_instance, db_name): + engine = MySQLEngine.from_instance( + project_id=db_project, + instance=db_instance, + region=db_region, + database=db_name, + ) + yield engine + + @pytest.fixture(scope="class") + def vs_1000(self, engine): + result = engine._fetch("SHOW TABLES") + tables = [list(r.values())[0] for r in result] + if TABLE_1000_ROWS not in tables: + engine.init_vectorstore_table( + TABLE_1000_ROWS, + VECTOR_SIZE, + ) + vs_1000 = MySQLVectorStore( + engine, + embedding_service=self.embeddings_service, + table_name=TABLE_1000_ROWS, + ) + row_count = vs_1000.engine._fetch(f"SELECT count(*) FROM `{TABLE_1000_ROWS}`")[ + 0 + ]["count(*)"] + # Add 1000 rows of data if the number of rows is less than 1000 + if row_count < 1000: + texts_1000 = [ + f"{text}_{i}" + for text in ["apple", "dog", "basketball", "coffee"] + for i in range(1, 251) + ] + ids = [str(uuid.uuid4()) for _ in range(len(texts_1000))] + vs_1000.add_texts(texts_1000, ids=ids) + vs_1000.drop_vector_index() + vs_1000.apply_vector_index(VectorIndex(index_type=IndexType.TREE_SQ)) + yield vs_1000 + vs_1000.drop_vector_index() + + def test_search_query_collection_knn(self, vs_1000): + result = vs_1000._query_collection(self.apple_100_embedding, k=10) + assert len(result) == 10 + assert result[0]["content"] == self.apple_100_text + + def test_search_query_collection_knn_with_filter(self, vs_1000): + vs_1000.drop_vector_index() + result = vs_1000._query_collection( + self.apple_100_embedding, k=5, filter=f"content != '{self.apple_100_text}'" + ) + assert len(result) == 5 + assert result[0]["content"] == "apple_154" + + def test_search_query_collection_distance_measure(self, vs_1000): + for measure in [ + DistanceMeasure.COSINE, + DistanceMeasure.DOT_PRODUCT, + DistanceMeasure.L2_SQUARED, + ]: + assert ( + vs_1000._query_collection( + self.apple_100_embedding, + query_options=QueryOptions(distance_measure=measure), + )[0]["content"] + == self.apple_100_text + ) + + def test_search_raise_when_num_partitions_set_for_knn(self, vs_1000): + with pytest.raises( + ValueError, match="num_partitions is not supported for the search type KNN" + ): + vs_1000._query_collection( + 
self.apple_100_embedding, + k=1, + filter="content != 'apple_100'", + query_options=QueryOptions(num_partitions=2), + ) + + def test_query_collection_ann_with_different_index_types(self, vs_1000): + vs_1000.apply_vector_index(VectorIndex(index_type=IndexType.BRUTE_FORCE_SCAN)) + result = vs_1000._query_collection(self.apple_100_embedding) + assert len(result) == 10 + assert result[0]["content"] == self.apple_100_text + + result = vs_1000._query_collection(self.apple_100_embedding, k=1) + assert result[0]["content"] == self.apple_100_text + + vs_1000.alter_vector_index(VectorIndex(index_type=IndexType.TREE_SQ)) + result = vs_1000._query_collection( + self.apple_100_embedding, + k=5, + query_options=QueryOptions(num_partitions=2, search_type=SearchType.ANN), + ) + assert len(result) == 5 + assert result[0]["content"] == self.apple_100_text + + vs_1000.alter_vector_index(VectorIndex(index_type=IndexType.TREE_AH)) + result = vs_1000._query_collection( + self.apple_100_embedding, k=5, filter=f"content != '{self.apple_100_text}'" + ) + assert len(result) == 4 + assert result[0]["content"] == "apple_154" + + def test_similarity_search_with_score_by_vector(self, vs_1000): + vs_1000.alter_vector_index(VectorIndex(index_type=IndexType.TREE_AH)) + docs_with_scores = vs_1000.similarity_search_with_score_by_vector( + self.apple_100_embedding, k=5 + ) + assert len(docs_with_scores) == 5 + assert docs_with_scores[0][0].page_content == self.apple_100_text + assert docs_with_scores[0][1] == 0 + + docs_with_scores = vs_1000.similarity_search_with_score_by_vector( + self.apple_100_embedding, + k=1, + query_options=QueryOptions( + distance_measure=DistanceMeasure.DOT_PRODUCT, search_type=SearchType.KNN + ), + ) + assert len(docs_with_scores) == 1 + assert docs_with_scores[0][0].page_content == self.apple_100_text + + def test_similarity_search_by_vector(self, vs_1000): + docs_with_scores = vs_1000.similarity_search_with_score_by_vector( + self.apple_100_embedding, k=5 + ) + docs = vs_1000.similarity_search_by_vector(self.apple_100_embedding, k=5) + assert [doc_with_score[0] for doc_with_score in docs_with_scores] == docs + + def test_similarity_search_with_score(self, vs_1000): + docs_with_scores = vs_1000.similarity_search_with_score_by_vector( + self.apple_100_embedding, k=5 + ) + docs_with_scores_from_text_search = vs_1000.similarity_search_with_score( + self.apple_100_text, k=5 + ) + assert docs_with_scores == docs_with_scores_from_text_search + + def test_similarity_search(self, vs_1000): + docs_with_scores = vs_1000.similarity_search_with_score_by_vector( + self.apple_100_embedding, + k=5, + query_options=QueryOptions(num_partitions=2, search_type=SearchType.ANN), + ) + docs = vs_1000.similarity_search( + self.apple_100_text, + k=5, + query_options=QueryOptions(num_partitions=2, search_type=SearchType.ANN), + ) + assert [doc_with_score[0] for doc_with_score in docs_with_scores] == docs + + def test_max_marginal_relevance_search_with_score_by_vector(self, vs_1000): + docs_with_scores = vs_1000.max_marginal_relevance_search_with_score_by_vector( + self.apple_100_embedding, k=5 + ) + assert len(docs_with_scores) == 5 + assert docs_with_scores[0][0].page_content == self.apple_100_text + assert docs_with_scores[0][1] == 0 + + def test_max_marginal_relevance_search_by_vector(self, vs_1000): + docs_with_scores = vs_1000.max_marginal_relevance_search_with_score_by_vector( + self.apple_100_embedding, k=5 + ) + docs = vs_1000.max_marginal_relevance_search_by_vector( + self.apple_100_embedding, k=5 + ) + assert 
[doc_with_score[0] for doc_with_score in docs_with_scores] == docs + + def test_max_marginal_relevance_search(self, vs_1000): + docs_with_scores = vs_1000.max_marginal_relevance_search_by_vector( + self.apple_100_embedding, k=5 + ) + docs_with_scores_from_text_search = vs_1000.max_marginal_relevance_search( + self.apple_100_text, k=5 + ) + assert docs_with_scores == docs_with_scores_from_text_search From e871c2b503fb0d056d7e374394db36e44dcda4c2 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Tue, 2 Apr 2024 16:48:49 -0400 Subject: [PATCH 19/23] docs: add basic MySQLVectorStore usage to README (#58) * docs: add basic MySQLVectorStore usage to README * chore: update code block * chore: update sample for consistency * Update README.md --------- Co-authored-by: Averi Kitsch --- README.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/README.md b/README.md index fb7508a..45132ec 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,28 @@ source /bin/activate /bin/pip install langchain-google-cloud-sql-mysql ``` +## Vector Store Usage + +Use a [vector store](https://python.langchain.com/docs/modules/data_connection/vectorstores/) to store +embedded data and perform vector search. + +```python +from langchain_google_cloud_sql_mysql import MySQLEngine, MySQLVectorStore +from langchain_google_vertexai import VertexAIEmbeddings + + +engine = MySQLEngine.from_instance("project-id", "region", "my-instance", "my-database") +engine.init_vectorstore_table( + table_name="my-table-name", + vector_size=768, # Vector size for `VertexAIEmbeddings()` +) +vectorstore = MySQLVectorStore( + engine, + embedding_service=VertextAIEmbeddings(), + table_name="my-table-name", +) +``` + ## Document Loader Usage Use a [document loader](https://python.langchain.com/docs/modules/data_connection/document_loaders/) to load data as LangChain `Document`s. From 9145308878aaf2f04d106f4401fdae9923aaa2da Mon Sep 17 00:00:00 2001 From: totoleon Date: Tue, 2 Apr 2024 16:18:45 -0700 Subject: [PATCH 20/23] chore: update docstring for MySQLVectorStore (#57) * doc: adding docstring to vector store --- .../vectorstore.py | 15 +++++++++++++-- .../integration/test_mysql_vectorstore_search.py | 8 ++++++-- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/src/langchain_google_cloud_sql_mysql/vectorstore.py b/src/langchain_google_cloud_sql_mysql/vectorstore.py index 6f44d48..2ac2f93 100644 --- a/src/langchain_google_cloud_sql_mysql/vectorstore.py +++ b/src/langchain_google_cloud_sql_mysql/vectorstore.py @@ -48,10 +48,10 @@ def __init__( ignore_metadata_columns: Optional[List[str]] = None, id_column: str = "langchain_id", metadata_json_column: Optional[str] = "langchain_metadata", - query_options: QueryOptions = DEFAULT_QUERY_OPTIONS, k: int = 4, fetch_k: int = 20, lambda_mult: float = 0.5, + query_options: QueryOptions = DEFAULT_QUERY_OPTIONS, ): """Constructor for MySQLVectorStore. Args: @@ -71,6 +71,17 @@ def __init__( Defaults to "langchain_id". metadata_json_column (str): Column to store metadata as JSON. Defaults to "langchain_metadata". + k (int): The number of documents to return as the final result of a + similarity search. Defaults to 4. + fetch_k (int): The number of documents to initially retrieve from + the database during a similarity search. These documents are + then re-ranked using MMR to select the final `k` documents. + Defaults to 20. + lambda_mult (float): The weight used to balance relevance and + diversity in the MMR algorithm. 
A higher value emphasizes + diversity more, while a lower value prioritizes relevance. + Defaults to 0.5. + query_options: Additional query options. """ if metadata_columns and ignore_metadata_columns: raise ValueError( @@ -130,10 +141,10 @@ def __init__( self.metadata_columns = metadata_columns self.id_column = id_column self.metadata_json_column = metadata_json_column - self.query_options = query_options self.k = k self.fetch_k = fetch_k self.lambda_mult = lambda_mult + self.query_options = query_options self.db_name = self.__get_db_name() @property diff --git a/tests/integration/test_mysql_vectorstore_search.py b/tests/integration/test_mysql_vectorstore_search.py index 0720191..71bed07 100644 --- a/tests/integration/test_mysql_vectorstore_search.py +++ b/tests/integration/test_mysql_vectorstore_search.py @@ -28,8 +28,9 @@ VectorIndex, ) -TABLE_1000_ROWS = "test_table_1000_rows_search" +TABLE_1000_ROWS = "test_table_1000_rows_search" + str(uuid.uuid4()).split("-")[0] VECTOR_SIZE = 8 +DEFAULT_INDEX = VectorIndex(index_type=IndexType.TREE_SQ) embeddings_service = DeterministicFakeEmbedding(size=VECTOR_SIZE) @@ -99,9 +100,10 @@ def vs_1000(self, engine): ids = [str(uuid.uuid4()) for _ in range(len(texts_1000))] vs_1000.add_texts(texts_1000, ids=ids) vs_1000.drop_vector_index() - vs_1000.apply_vector_index(VectorIndex(index_type=IndexType.TREE_SQ)) + vs_1000.apply_vector_index(DEFAULT_INDEX) yield vs_1000 vs_1000.drop_vector_index() + engine._execute(f"DROP TABLE IF EXISTS `{TABLE_1000_ROWS}`") def test_search_query_collection_knn(self, vs_1000): result = vs_1000._query_collection(self.apple_100_embedding, k=10) @@ -117,6 +119,7 @@ def test_search_query_collection_knn_with_filter(self, vs_1000): assert result[0]["content"] == "apple_154" def test_search_query_collection_distance_measure(self, vs_1000): + vs_1000.apply_vector_index(DEFAULT_INDEX) for measure in [ DistanceMeasure.COSINE, DistanceMeasure.DOT_PRODUCT, @@ -129,6 +132,7 @@ def test_search_query_collection_distance_measure(self, vs_1000): )[0]["content"] == self.apple_100_text ) + vs_1000.drop_vector_index() def test_search_raise_when_num_partitions_set_for_knn(self, vs_1000): with pytest.raises( From 0ece837e98ff60512d26b5c7c8fb4803e056ad3c Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Wed, 3 Apr 2024 19:13:13 -0400 Subject: [PATCH 21/23] docs: add MySQLVectorStore reference notebook (#59) * docs: add MySQLVectorStore reference notebook * chore: update advanced sample * chore: update sample --- README.md | 3 + docs/vector_store.ipynb | 606 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 609 insertions(+) create mode 100644 docs/vector_store.ipynb diff --git a/README.md b/README.md index 45132ec..f2042e3 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,8 @@ vectorstore = MySQLVectorStore( ) ``` +See the full [Vector Store][vectorstore] tutorial. + ## Document Loader Usage Use a [document loader](https://python.langchain.com/docs/modules/data_connection/document_loaders/) to load data as LangChain `Document`s. @@ -117,3 +119,4 @@ This is not an officially supported Google product. 
[loader]: ./docs/document_loader.ipynb [history]: ./docs/chat_message_history.ipynb [langchain]: https://github.com/langchain-ai/langchain +[vectorstore]: https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/tree/main/docs/vector_store.ipynb diff --git a/docs/vector_store.ipynb b/docs/vector_store.ipynb new file mode 100644 index 0000000..ac293fc --- /dev/null +++ b/docs/vector_store.ipynb @@ -0,0 +1,606 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "DspIHgfGLL5u" + }, + "source": [ + "# Google Cloud SQL for MySQL\n", + "\n", + "> [Cloud SQL](https://cloud.google.com/sql) is a fully managed relational database service that offers high performance, seamless integration, and impressive scalability. It offers PostgreSQL, MySQL, and SQL Server database engines. Extend your database application to build AI-powered experiences leveraging Cloud SQL's LangChain integrations.\n", + "\n", + "This notebook goes over how to use `Cloud SQL for MySQL` to store vector embeddings with the `MySQLVectorStore` class.\n", + "\n", + "Learn more about the package on [GitHub](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/).\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/googleapis/langchain-google-cloud-sql-mysql-python/blob/main/docs/vector_store.ipynb)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "4_aOTN3FLL5x" + }, + "source": [ + "## Before you begin\n", + "\n", + "To run this notebook, you will need to do the following:\n", + "\n", + " * [Create a Google Cloud Project](https://developers.google.com/workspace/guides/create-project)\n", + " * [Enable the Cloud SQL Admin API.](https://console.cloud.google.com/flows/enableapi?apiid=sqladmin.googleapis.com)\n", + " * [Create a Cloud SQL instance.](https://cloud.google.com/sql/docs/mysql/connect-instance-auth-proxy#create-instance) (version must be >= 8.0.36)\n", + " * [Create a Cloud SQL database.](https://cloud.google.com/sql/docs/mysql/create-manage-databases)\n", + " * [Add a User to the database.](https://cloud.google.com/sql/docs/mysql/create-manage-users)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "IR54BmgvdHT_" + }, + "source": [ + "### 🦜🔗 Library Installation\n", + "Install the integration library, `langchain-google-cloud-sql-mysql`, and the library for the embedding service, `langchain-google-vertexai`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "0ZITIDE160OD" + }, + "outputs": [], + "source": [ + "%pip install --upgrade --quiet langchain-google-cloud-sql-mysql langchain-google-vertexai" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "v40bB_GMcr9f" + }, + "source": [ + "**Colab only:** Uncomment the following cell to restart the kernel or use the button to restart the kernel. For Vertex AI Workbench you can restart the terminal using the button on top." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "v6jBDnYnNM08" + }, + "outputs": [], + "source": [ + "# # Automatically restart kernel after installs so that your environment can access the new packages\n", + "# import IPython\n", + "\n", + "# app = IPython.Application.instance()\n", + "# app.kernel.do_shutdown(True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "yygMe6rPWxHS" + }, + "source": [ + "### 🔐 Authentication\n", + "Authenticate to Google Cloud as the IAM user logged into this notebook in order to access your Google Cloud Project.\n", + "\n", + "* If you are using Colab to run this notebook, use the cell below and continue.\n", + "* If you are using Vertex AI Workbench, check out the setup instructions [here](https://github.com/GoogleCloudPlatform/generative-ai/tree/main/setup-env)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "PTXN1_DSXj2b" + }, + "outputs": [], + "source": [ + "from google.colab import auth\n", + "\n", + "auth.authenticate_user()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NEvB9BoLEulY" + }, + "source": [ + "### ☁ Set Your Google Cloud Project\n", + "Set your Google Cloud project so that you can leverage Google Cloud resources within this notebook.\n", + "\n", + "If you don't know your project ID, try the following:\n", + "\n", + "* Run `gcloud config list`.\n", + "* Run `gcloud projects list`.\n", + "* See the support page: [Locate the project ID](https://support.google.com/googleapi/answer/7014113)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "cellView": "form", + "id": "gfkS3yVRE4_W" + }, + "outputs": [], + "source": [ + "# @markdown Please fill in the value below with your Google Cloud project ID and then run the cell.\n", + "\n", + "PROJECT_ID = \"my-project-id\" # @param {type:\"string\"}\n", + "\n", + "# Set the project id\n", + "!gcloud config set project {PROJECT_ID}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "f8f2830ee9ca1e01" + }, + "source": [ + "## Basic Usage" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "OMvzMWRrR6n7" + }, + "source": [ + "### Set Cloud SQL database values\n", + "Find your database values, in the [Cloud SQL Instances page](https://console.cloud.google.com/sql?_ga=2.223735448.2062268965.1707700487-2088871159.1707257687)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "irl7eMFnSPZr" + }, + "outputs": [], + "source": [ + "# @title Set Your Values Here { display-mode: \"form\" }\n", + "REGION = \"us-central1\" # @param {type: \"string\"}\n", + "INSTANCE = \"my-mysql-instance\" # @param {type: \"string\"}\n", + "DATABASE = \"my-database\" # @param {type: \"string\"}\n", + "TABLE_NAME = \"vector_store\" # @param {type: \"string\"}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QuQigs4UoFQ2" + }, + "source": [ + "### MySQLEngine Connection Pool\n", + "\n", + "One of the requirements and arguments to establish Cloud SQL as a vector store is a `MySQLEngine` object. The `MySQLEngine` configures a connection pool to your Cloud SQL database, enabling successful connections from your application and following industry best practices.\n", + "\n", + "To create a `MySQLEngine` using `MySQLEngine.from_instance()` you need to provide only 4 things:\n", + "\n", + "1. 
`project_id` : Project ID of the Google Cloud Project where the Cloud SQL instance is located.\n", + "1. `region` : Region where the Cloud SQL instance is located.\n", + "1. `instance` : The name of the Cloud SQL instance.\n", + "1. `database` : The name of the database to connect to on the Cloud SQL instance.\n", + "\n", + "By default, [IAM database authentication](https://cloud.google.com/sql/docs/mysql/iam-authentication#iam-db-auth) will be used as the method of database authentication. This library uses the IAM principal belonging to the [Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/application-default-credentials) sourced from the environment.\n", + "\n", + "For more information on IAM database authentication please see:\n", + "\n", + "* [Configure an instance for IAM database authentication](https://cloud.google.com/sql/docs/mysql/create-edit-iam-instances)\n", + "* [Manage users with IAM database authentication](https://cloud.google.com/sql/docs/mysql/add-manage-iam-users)\n", + "\n", + "Optionally, [built-in database authentication](https://cloud.google.com/sql/docs/mysql/built-in-authentication) using a username and password to access the Cloud SQL database can also be used. Just provide the optional `user` and `password` arguments to `MySQLEngine.from_instance()`:\n", + "\n", + "* `user` : Database user to use for built-in database authentication and login\n", + "* `password` : Database password to use for built-in database authentication and login.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "guVURf1QLL53" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import MySQLEngine\n", + "\n", + "engine = MySQLEngine.from_instance(\n", + " project_id=PROJECT_ID, region=REGION, instance=INSTANCE, database=DATABASE\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "D9Xs2qhm6X56" + }, + "source": [ + "### Initialize a table\n", + "The `MySQLVectorStore` class requires a database table. The `MySQLEngine` class has a helper method `init_vectorstore_table()` that can be used to create a table with the proper schema for you." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "avlyHEMn6gzU" + }, + "outputs": [], + "source": [ + "engine.init_vectorstore_table(\n", + " table_name=TABLE_NAME,\n", + " vector_size=768, # Vector size for the Vertex AI model (textembedding-gecko@latest)\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "aVE5TaJILL53" + }, + "source": [ + "### Create an embedding class instance\n", + "\n", + "You can use any [LangChain embeddings model](https://python.langchain.com/docs/integrations/text_embedding/).\n", + "You may need to enable the Vertex AI API to use `VertexAIEmbeddings`.\n", + "\n", + "We recommend pinning the embedding model's version for production; learn more about the [Text embeddings models](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-embeddings)."
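For completeness, here is a small sketch of the built-in database authentication path described in the connection-pool section above; the credential values are placeholders, and everything else mirrors the IAM-based example.

```python
from langchain_google_cloud_sql_mysql import MySQLEngine

# Built-in database authentication: pass the optional user/password arguments
# instead of relying on IAM (placeholder credentials shown).
engine = MySQLEngine.from_instance(
    project_id=PROJECT_ID,
    region=REGION,
    instance=INSTANCE,
    database=DATABASE,
    user="my-db-user",          # placeholder database user
    password="my-db-password",  # placeholder password
)
```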
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "5utKIdq7KYi5" + }, + "outputs": [], + "source": [ + "# enable Vertex AI API\n", + "!gcloud services enable aiplatform.googleapis.com" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Vb2RJocV9_LQ" + }, + "outputs": [], + "source": [ + "from langchain_google_vertexai import VertexAIEmbeddings\n", + "\n", + "embedding = VertexAIEmbeddings(\n", + " model_name=\"textembedding-gecko@latest\", project=PROJECT_ID\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "e1tl0aNx7SWy" + }, + "source": [ + "### Initialize a default MySQLVectorStore\n", + "\n", + "To initialize a `MySQLVectorStore` class you need to provide only 3 things:\n", + "\n", + "1. `engine` - An instance of a `MySQLEngine` engine.\n", + "1. `embedding_service` - An instance of a LangChain embedding model.\n", + "1. `table_name` : The name of the table within the Cloud SQL database to use as the vector store." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "z-AZyzAQ7bsf" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import MySQLVectorStore\n", + "\n", + "store = MySQLVectorStore(\n", + " engine=engine,\n", + " embedding_service=embedding,\n", + " table_name=TABLE_NAME,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "456LVGttLL54" + }, + "source": [ + "### Add texts" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "nrDvGWIOLL54" + }, + "outputs": [], + "source": [ + "import uuid\n", + "\n", + "all_texts = [\"Apples and oranges\", \"Cars and airplanes\", \"Pineapple\", \"Train\", \"Banana\"]\n", + "metadatas = [{\"len\": len(t)} for t in all_texts]\n", + "ids = [str(uuid.uuid4()) for _ in all_texts]\n", + "\n", + "store.add_texts(all_texts, metadatas=metadatas, ids=ids)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "iCESj2VkLL54" + }, + "source": [ + "### Delete texts\n", + "\n", + "Delete vectors from the vector store by ID." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "ukz6j6hiLL54" + }, + "outputs": [], + "source": [ + "store.delete([ids[1]])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MOVpfY1lLL54" + }, + "source": [ + "### Search for documents" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "id": "fpqeZgUeLL54", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "f674a3af-452c-4d58-bb62-cbf514a9e1e3" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Pineapple\n" + ] + } + ], + "source": [ + "query = \"I'd like a fruit.\"\n", + "docs = store.similarity_search(query)\n", + "print(docs[0].page_content)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "P9tG2a2gLL54" + }, + "source": [ + "### Search for documents by vector\n", + "\n", + "It is also possible to do a search for documents similar to a given embedding vector using `similarity_search_by_vector` which accepts an embedding vector as a parameter instead of a string." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "N-NC5jgGLL55", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "69a1f9de-a830-450d-8a5e-118b36815a46" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[Document(page_content='Pineapple', metadata={'len': 9}), Document(page_content='Banana', metadata={'len': 6})]\n" + ] + } + ], + "source": [ + "query_vector = embedding.embed_query(query)\n", + "docs = store.similarity_search_by_vector(query_vector, k=2)\n", + "print(docs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "KefzU1OCLL55" + }, + "source": [ + "### Add an index\n", + "Speed up vector search queries by applying a vector index. Learn more about [MySQL vector indexes](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/blob/main/src/langchain_google_cloud_sql_mysql/indexes.py)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "r6QOYfh_LL55" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import VectorIndex\n", + "\n", + "store.apply_vector_index(VectorIndex())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3Y-GZ4CvLL55" + }, + "source": [ + "### Remove an index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "EhloKJoPLL55" + }, + "outputs": [], + "source": [ + "store.drop_vector_index()" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Advanced Usage" + ], + "metadata": { + "id": "K8XAZZTDqwIp" + } + }, + { + "cell_type": "markdown", + "metadata": { + "id": "A6h77GunLL55" + }, + "source": [ + "### Create a MySQLVectorStore with custom metadata\n", + "\n", + "A vector store can take advantage of relational data to filter similarity searches.\n", + "\n", + "Create a table and `MySQLVectorStore` instance with custom metadata columns." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "eANG7_8qLL55" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import Column\n", + "\n", + "# set table name\n", + "CUSTOM_TABLE_NAME = \"vector_store_custom\"\n", + "\n", + "engine.init_vectorstore_table(\n", + " table_name=CUSTOM_TABLE_NAME,\n", + " vector_size=768, # VertexAI model: textembedding-gecko@latest\n", + " metadata_columns=[Column(\"len\", \"INTEGER\")],\n", + ")\n", + "\n", + "\n", + "# initialize MySQLVectorStore with custom metadata columns\n", + "custom_store = MySQLVectorStore(\n", + " engine=engine,\n", + " embedding_service=embedding,\n", + " table_name=CUSTOM_TABLE_NAME,\n", + " metadata_columns=[\"len\"],\n", + " # connect to an existing VectorStore by customizing the table schema:\n", + " # id_column=\"uuid\",\n", + " # content_column=\"documents\",\n", + " # embedding_column=\"vectors\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "bj2d-c2sLL5-" + }, + "source": [ + "### Search for documents with metadata filter\n", + "\n", + "It can be helpful to narrow down the documents before working with them.\n", + "\n", + "For example, documents can be filtered on metadata using the `filter` argument." 
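Relatedly, the vector index applied above does not have to use the defaults. A hedged sketch, mirroring the `TREE_SQ` index built by the integration tests earlier in this patch series (and assuming `IndexType` is importable from the package top level, as those tests suggest):

```python
from langchain_google_cloud_sql_mysql import IndexType, VectorIndex

# Apply a non-default index type, as the integration tests do.
store.apply_vector_index(VectorIndex(index_type=IndexType.TREE_SQ))

# Drop and re-apply the index later if the underlying data changes significantly.
store.drop_vector_index()
```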
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Sqfgk6EOLL5-", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "a10c74e2-fe48-4cf9-ba2f-85aecb2490d0" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[Document(page_content='Pineapple', metadata={'len': 9}), Document(page_content='Banana', metadata={'len': 6}), Document(page_content='Apples and oranges', metadata={'len': 18}), Document(page_content='Cars and airplanes', metadata={'len': 18})]\n" + ] + } + ], + "source": [ + "import uuid\n", + "\n", + "# add texts to the vector store\n", + "all_texts = [\"Apples and oranges\", \"Cars and airplanes\", \"Pineapple\", \"Train\", \"Banana\"]\n", + "metadatas = [{\"len\": len(t)} for t in all_texts]\n", + "ids = [str(uuid.uuid4()) for _ in all_texts]\n", + "custom_store.add_texts(all_texts, metadatas=metadatas, ids=ids)\n", + "\n", + "# use filter on search\n", + "query_vector = embedding.embed_query(\"I'd like a fruit.\")\n", + "docs = custom_store.similarity_search_by_vector(query_vector, filter=\"len >= 6\")\n", + "\n", + "print(docs)" + ] + } + ], + "metadata": { + "colab": { + "provenance": [], + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From 388f5a4e6b76d23c1e683029c5ea034cfe84bbf7 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Mon, 8 Apr 2024 15:45:30 -0400 Subject: [PATCH 22/23] docs: add end-to-end MySQL quickstart (#61) --- samples/langchain_quick_start.ipynb | 1032 +++++++++++++++++++++++++++ 1 file changed, 1032 insertions(+) create mode 100644 samples/langchain_quick_start.ipynb diff --git a/samples/langchain_quick_start.ipynb b/samples/langchain_quick_start.ipynb new file mode 100644 index 0000000..5b356c7 --- /dev/null +++ b/samples/langchain_quick_start.ipynb @@ -0,0 +1,1032 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "6zvUr-Qev6lL" + }, + "outputs": [], + "source": [ + "# Copyright 2024 Google LLC\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ob11AkrStrRI" + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/googleapis/langchain-google-cloud-sql-mysql-python/blob/main/samples/langchain_quick_start.ipynb)\n", + "\n", + "---\n", + "# **Introduction**\n", + "\n", + "In this codelab, you'll learn how to create a powerful interactive generative AI application using Retrieval Augmented Generation powered by [Cloud SQL for MySQL](https://cloud.google.com/sql/docs/mysql) and [LangChain](https://www.langchain.com/). We will be creating an application grounded in a [Netflix Movie dataset](https://www.kaggle.com/datasets/shivamb/netflix-shows), allowing you to interact with movie data in exciting new ways." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Ma6pEng3ypbA" + }, + "source": [ + "## Prerequisites\n", + "\n", + "* A basic understanding of the Google Cloud Console\n", + "* Basic skills in command line interface and Google Cloud shell\n", + "* Basic Python knowledge" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DzDgqJHgysy1" + }, + "source": [ + "## What you'll learn\n", + "\n", + "* How to deploy a Cloud SQL for MySQL instance\n", + "* How to use Cloud SQL for MySQL as a DocumentLoader\n", + "* How to use Cloud SQL for MySQL as a VectorStore\n", + "* How to use Cloud SQL for MySQL for ChatMessageHistory storage" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "FbcZUjT1yvTq" + }, + "source": [ + "## What you'll need\n", + "* A Google Cloud Account and Google Cloud Project\n", + "* A web browser such as [Chrome](https://www.google.com/chrome/)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "vHdR4fF3vLWA" + }, + "source": [ + "# **Setup and Requirements**\n", + "\n", + "In the following instructions you will learn to:\n", + "1. Install required dependencies for our application\n", + "2. Set up authentication for our project\n", + "3. Set up a Cloud SQL for MySQL Instance\n", + "4. Import the data used by our application" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "uy9KqgPQ4GBi" + }, + "source": [ + "## Install dependencies\n", + "First you will need to install the dependencies needed to run this demo app." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "M_ppDxYf4Gqs" + }, + "outputs": [], + "source": [ + "%pip install --upgrade --quiet langchain-google-cloud-sql-mysql langchain-google-vertexai langchain" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "FXK81xEmYU5z" + }, + "source": [ + "**Colab only:** Uncomment the following cell to restart the kernel or use the button to restart the kernel. For Vertex AI Workbench you can restart the terminal using the button on top." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "7gyv1anhYU5z" + }, + "outputs": [], + "source": [ + "# Automatically restart kernel after installs so that your environment can access the new packages\n", + "import IPython\n", + "\n", + "app = IPython.Application.instance()\n", + "app.kernel.do_shutdown(True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DeUbHclxw7_l" + }, + "source": [ + "## Authenticate to Google Cloud within Colab\n", + "In order to access your Google Cloud Project from this notebook, you will need to Authenticate as an IAM user." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "a168rJE1xDHO" + }, + "outputs": [], + "source": [ + "from google.colab import auth\n", + "\n", + "auth.authenticate_user()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "UCiNGP1Qxd6x" + }, + "source": [ + "## Connect Your Google Cloud Project\n", + "Time to connect your Google Cloud Project to this notebook so that you can leverage Google Cloud from within Colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "cellView": "form", + "id": "qjFuhRhVxlWP", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "be1ad33c-9647-43c0-9722-dbccea26dcae" + }, + "outputs": [], + "source": [ + "# @markdown Please fill in the value below with your GCP project ID and then run the cell.\n", + "\n", + "# Please fill in these values.\n", + "PROJECT_ID = \"\" # @param {type:\"string\"}\n", + "\n", + "# Quick input validations.\n", + "assert PROJECT_ID, \"⚠️ Please provide a Google Cloud project ID\"\n", + "\n", + "# Configure gcloud.\n", + "!gcloud config set project {PROJECT_ID}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "O-oqMC5Ox-ZM" + }, + "source": [ + "## Configure Your Google Cloud Project\n", + "\n", + "Configure the following in your Google Cloud Project." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "yjMjTGg7YU51" + }, + "source": [ + "1. IAM principal (user, service account, etc.) with the [Cloud SQL Client][client-role] role. The user logged into this notebook will be used as the IAM principal and will be granted the Cloud SQL Client role.\n", + "\n", + "[client-role]: https://cloud.google.com/sql/docs/mysql/roles-and-permissions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "xOnUO-5gYU51" + }, + "outputs": [], + "source": [ + "current_user = !gcloud auth list --filter=status:ACTIVE --format=\"value(account)\"\n", + "!gcloud projects add-iam-policy-binding {PROJECT_ID} \\\n", + " --member=user:{current_user[0]} \\\n", + " --role=\"roles/cloudsql.client\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ieb_vqHKYU51" + }, + "source": [ + "2. Enable the APIs for Cloud SQL and Vertex AI within your project." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "CKWrwyfzyTwH" + }, + "outputs": [], + "source": [ + "# Enable GCP services\n", + "!gcloud services enable sqladmin.googleapis.com aiplatform.googleapis.com" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Gn8g7-wCyZU6" + }, + "source": [ + "## Set up Cloud SQL\n", + "You will need a **MySQL** Cloud SQL instance for the following stages of this notebook." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "T616pEOUygYQ" + }, + "source": [ + "### Create a MySQL Instance\n", + "Running the cell below will verify the existence of the Cloud SQL instance, or create a new instance if one does not exist.\n", + "\n", + "A database named `langchain_db` will be created and used for the rest of the quickstart.\n", + "\n", + "**Note:** MySQL vector support is only available on MySQL instances with version **>= 8.0.36**.\n", + "\n", + "> For existing instances, you may need to perform a self-service maintenance update to bring your maintenance version to **MYSQL_8_0_36.R20240401.03_00** or greater. Once updated, [configure your database flags](https://cloud.google.com/sql/docs/mysql/flags) to set the new **cloudsql_vector** flag to \"On\".\n", + "\n", + "> ⏳ - Creating a Cloud SQL instance may take a few minutes." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "cellView": "form", + "id": "XXI1uUu3y8gc" + }, + "outputs": [], + "source": [ + "#@markdown Please fill in both the Google Cloud region and the name of your Cloud SQL instance. Once filled in, run the cell.\n", + "\n", + "# Please fill in these values.\n", + "REGION = \"us-central1\" #@param {type:\"string\"}\n", + "INSTANCE_NAME = \"langchain-quickstart-instance\" #@param {type:\"string\"}\n", + "DATABASE_NAME = \"langchain_db\"\n", + "PASSWORD = input(\"Please provide a password to be used for 'root' database user: \")\n", + "\n", + "# Quick input validations.\n", + "assert REGION, \"⚠️ Please provide a Google Cloud region\"\n", + "assert INSTANCE_NAME, \"⚠️ Please provide the name of your instance\"\n", + "assert DATABASE_NAME, \"⚠️ Please provide the name of your database\"\n", + "\n", + "# Check if the Cloud SQL instance exists in the provided region.\n", + "database_version = !gcloud sql instances describe {INSTANCE_NAME} --format=\"value(databaseVersion)\"\n", + "if database_version[0].startswith(\"MYSQL\"):\n", + " print(\"Found existing MySQL Cloud SQL Instance!\")\n", + "else:\n", + " print(\"Creating new Cloud SQL instance...\")\n", + " !gcloud sql instances create {INSTANCE_NAME} --database-version=MYSQL_8_0_36 \\\n", + " --region={REGION} --cpu=1 --memory=4GB --root-password={PASSWORD} \\\n", + " --database-flags=cloudsql_iam_authentication=On,cloudsql_vector=On\n", + "\n", + "databases = !gcloud sql databases list --instance={INSTANCE_NAME} --format=\"value(name)\"\n", + "if DATABASE_NAME not in databases:\n", + " print(\"Creating 'langchain_db' database for the quickstart...\")\n", + " !gcloud sql databases create {DATABASE_NAME} --instance={INSTANCE_NAME}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HdolCWyatZmG" + }, + "source": [ + "## Import data to your database\n", + "\n", + "Now that you have your database, you will need to import data! We will be using a [Netflix Dataset from Kaggle](https://www.kaggle.com/datasets/shivamb/netflix-shows). 
Here is what the data looks like:" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "36-FBKzJ-tLa" + }, + "source": [ + "| show_id | type | title | director | cast | country | date_added | release_year | rating | duration | listed_in | description |\n", + "|---------|---------|----------------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------|---------------|-------------------|--------------|--------|-----------|----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|\n", + "| s1 | Movie | Dick Johnson Is Dead | Kirsten Johnson | | United States | September 25, 2021 | 2020 | PG-13 | 90 min | Documentaries | As her father nears the end of his life, filmmaker Kirsten Johnson stages his death in inventive and comical ways to help them both face the inevitable. |\n", + "| s2 | TV Show | Blood & Water | | Ama Qamata, Khosi Ngema, Gail Mabalane, Thabang Molaba, Dillon Windvogel, Natasha Thahane, Arno Greeff, Xolile Tshabalala, Getmore Sithole, Cindy Mahlangu, Ryle De Morny, Greteli Fincham, Sello Maake Ka-Ncube, Odwa Gwanya, Mekaila Mathys, Sandi Schultz, Duane Williams, Shamilla Miller, Patrick Mofokeng | South Africa | September 24, 2021 | 2021 | TV-MA | 2 Seasons | International TV Shows, TV Dramas, TV Mysteries | After crossing paths at a party, a Cape Town teen sets out to prove whether a private-school swimming star is her sister who was abducted at birth. |\n", + "| s3 | TV Show | Ganglands | Julien Leclercq | Sami Bouajila, Tracy Gotoas, Samuel Jouy, Nabiha Akkari, Sofia Lesaffre, Salim Kechiouche, Noureddine Farihi, Geert Van Rampelberg, Bakary Diombera | | September 24, 2021 | 2021 | TV-MA | 1 Season | Crime TV Shows, International TV Shows, TV Action & Adventure | To protect his family from a powerful drug lord, skilled thief Mehdi and his expert team of robbers are pulled into a violent and deadly turf war. |\n", + "| s4 | TV Show | Jailbirds New Orleans | | | | September 24, 2021 | 2021 | TV-MA | 1 Season | Docuseries, Reality TV | Feuds, flirtations and toilet talk go down among the incarcerated women at the Orleans Justice Center in New Orleans on this gritty reality series. |\n", + "| s5 | TV Show | Kota Factory | | Mayur More, Jitendra Kumar, Ranjan Raj, Alam Khan, Ahsaas Channa, Revathi Pillai, Urvi Singh, Arun Kumar | India | September 24, 2021 | 2021 | TV-MA | 2 Seasons | International TV Shows, Romantic TV Shows, TV Comedies | In a city of coaching centers known to train India’s finest collegiate minds, an earnest but unexceptional student and his friends navigate campus life. |\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "kQ2KWsYI_Msa" + }, + "source": [ + "You won't need to directly load the csv data into your database. Instead we prepared a table, \"netflix_titles\", in the format of a `.sql` file for MySQL. You can easily import the table into your database with one `gcloud` command." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "qbLhv9jgD8nm" + }, + "outputs": [], + "source": [ + "# Import the Netflix titles table using gcloud command\n", + "import_command_output = !gcloud sql import sql {INSTANCE_NAME} gs://cloud-samples-data/langchain/cloud-sql/mysql/netflix_titles.sql --database={DATABASE_NAME} --quiet\n", + "\n", + "if \"Imported data\" in str(import_command_output):\n", + " print(import_command_output)\n", + "elif \"already exists\" in str(import_command_output):\n", + " print(\"Did not import because the table already existed.\")\n", + "else:\n", + " raise Exception(f\"The import seems to have failed:\\n{import_command_output}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SsGS80H04bDN" + }, + "source": [ + "# **Use case 1: Cloud SQL for MySQL as a document loader**\n", + "\n", + "Now that you have data in your database, you are ready to use Cloud SQL for MySQL as a [document loader](https://python.langchain.com/docs/modules/data_connection/document_loaders/). This means we will pull data from the database and load it into memory as documents. We can then feed these documents into the vector store." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "-CQgPON8dwSK" + }, + "source": [ + "Next let's connect to our Cloud SQL MySQL instance using the `MySQLEngine` class from the `langchain-google-cloud-sql-mysql` package." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "zrwTsWHMkQ_v" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import MySQLEngine\n", + "\n", + "mysql_engine = MySQLEngine.from_instance(\n", + " project_id=PROJECT_ID,\n", + " instance=INSTANCE_NAME,\n", + " region=REGION,\n", + " database=DATABASE_NAME,\n", + " user=\"root\",\n", + " password=PASSWORD,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "8s-C0P-Oee69" + }, + "source": [ + "Once we initialize a MySQLEngine object, we can pass it into the `MySQLLoader` to connect to a specific database. As you can see we also pass in a query, table_name and a list of columns. The query tells the loader what query to use to pull data. The \"content_columns\" argument refers to the columns that will be used as \"content\" in the document object we will later construct. The rest of the columns in that table will become the \"metadata\" associated with the documents." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "2SdFJT6Vece1" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import MySQLLoader\n", + "\n", + "table_name = \"netflix_titles\"\n", + "content_columns = [\"title\", \"director\", \"cast\", \"description\"]\n", + "loader = MySQLLoader(\n", + " engine=mysql_engine,\n", + " query=f\"SELECT * FROM `{table_name}`;\",\n", + " content_columns=content_columns,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dsL-KFrmfuS1" + }, + "source": [ + "Then let's load our documents from our database using our document loader. You can see the first 5 documents from the database here. Nice, you just used Cloud SQL for MySQL as a LangChain document loader!" 
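As a further illustration of the `query` argument described above, the loader accepts an arbitrary SQL statement, so a subset of the table can be loaded instead of every row. The `WHERE` clause and the `movie_loader` name below are purely illustrative.

```python
from langchain_google_cloud_sql_mysql import MySQLLoader

# Hypothetical variant: load only movies, keeping the same content columns.
movie_loader = MySQLLoader(
    engine=mysql_engine,
    query=f"SELECT * FROM `{table_name}` WHERE type = 'Movie';",
    content_columns=content_columns,
)
movie_documents = movie_loader.load()
```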
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "t4zTx-HLfwmW", + "outputId": "88dc2827-d735-4ae8-9f1d-db4e0e0c9b63" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Loaded 8807 documents from the database. \n", + "5 Examples:\n", + "page_content='Dick Johnson Is Dead Kirsten Johnson As her father nears the end of his life, filmmaker Kirsten Johnson stages his death in inventive and comical ways to help them both face the inevitable.' metadata={'show_id': 's1', 'type': 'Movie', 'country': 'United States', 'date_added': 'September 25, 2021', 'release_year': 2020, 'rating': 'PG-13', 'duration': '90 min', 'listed_in': 'Documentaries'}\n", + "page_content='Blood & Water Ama Qamata, Khosi Ngema, Gail Mabalane, Thabang Molaba, Dillon Windvogel, Natasha Thahane, Arno Greeff, Xolile Tshabalala, Getmore Sithole, Cindy Mahlangu, Ryle De Morny, Greteli Fincham, Sello Maake Ka-Ncube, Odwa Gwanya, Mekaila Mathys, Sandi Schultz, Duane Williams, Shamilla Miller, Patrick Mofokeng After crossing paths at a party, a Cape Town teen sets out to prove whether a private-school swimming star is her sister who was abducted at birth.' metadata={'show_id': 's2', 'type': 'TV Show', 'country': 'South Africa', 'date_added': 'September 24, 2021', 'release_year': 2021, 'rating': 'TV-MA', 'duration': '2 Seasons', 'listed_in': 'International TV Shows, TV Dramas, TV Mysteries'}\n", + "page_content='Ganglands Julien Leclercq Sami Bouajila, Tracy Gotoas, Samuel Jouy, Nabiha Akkari, Sofia Lesaffre, Salim Kechiouche, Noureddine Farihi, Geert Van Rampelberg, Bakary Diombera To protect his family from a powerful drug lord, skilled thief Mehdi and his expert team of robbers are pulled into a violent and deadly turf war.' metadata={'show_id': 's3', 'type': 'TV Show', 'country': '', 'date_added': 'September 24, 2021', 'release_year': 2021, 'rating': 'TV-MA', 'duration': '1 Season', 'listed_in': 'Crime TV Shows, International TV Shows, TV Action '}\n", + "page_content='Jailbirds New Orleans Feuds, flirtations and toilet talk go down among the incarcerated women at the Orleans Justice Center in New Orleans on this gritty reality series.' metadata={'show_id': 's4', 'type': 'TV Show', 'country': '', 'date_added': 'September 24, 2021', 'release_year': 2021, 'rating': 'TV-MA', 'duration': '1 Season', 'listed_in': 'Docuseries, Reality TV'}\n", + "page_content='Kota Factory Mayur More, Jitendra Kumar, Ranjan Raj, Alam Khan, Ahsaas Channa, Revathi Pillai, Urvi Singh, Arun Kumar In a city of coaching centers known to train India’s finest collegiate minds, an earnest but unexceptional student and his friends navigate campus life.' metadata={'show_id': 's5', 'type': 'TV Show', 'country': 'India', 'date_added': 'September 24, 2021', 'release_year': 2021, 'rating': 'TV-MA', 'duration': '2 Seasons', 'listed_in': 'International TV Shows, Romantic TV Shows, TV Come'}\n" + ] + } + ], + "source": [ + "documents = loader.load()\n", + "print(f\"Loaded {len(documents)} documents from the database. 
\n5 Examples:\")\n", + "for doc in documents[:5]:\n", + " print(doc)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "z9uLV3bs4noo" + }, + "source": [ + "# **Use case 2: Cloud SQL for MySQL as a vector store**\n", + "\n", + "---\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "duVsSeMcgEWl" + }, + "source": [ + "Now, let's learn how to put all of the documents we just loaded into a [vector store](https://python.langchain.com/docs/modules/data_connection/vectorstores/) so that we can use vector search to answer our user's questions!" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jfH8oQJ945Ko" + }, + "source": [ + "### Create Your Vector Store table\n", + "\n", + "Based on the documents that we loaded before, we want to create a table with a vector column as our vector store. We will start by initializing a vector table by calling the `init_vectorstore_table` function from our `engine`. As you can see, we list all of the columns for our metadata. We also specify a vector size, 768, that corresponds to the length of the vectors computed by the model our embeddings service uses, Vertex AI's textembedding-gecko.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "e_rmjywG47pv" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import Column\n", + "\n", + "vector_table_name = \"vector_netflix_titles\"\n", + "\n", + "mysql_engine.init_vectorstore_table(\n", + " table_name=vector_table_name,\n", + " vector_size=768,\n", + " metadata_columns=[\n", + " Column(\"show_id\", \"VARCHAR(50)\", nullable=True),\n", + " Column(\"type\", \"VARCHAR(50)\", nullable=True),\n", + " Column(\"country\", \"VARCHAR(50)\", nullable=True),\n", + " Column(\"date_added\", \"VARCHAR(50)\", nullable=True),\n", + " Column(\"release_year\", \"INTEGER\", nullable=True),\n", + " Column(\"rating\", \"VARCHAR(50)\", nullable=True),\n", + " Column(\"duration\", \"VARCHAR(50)\", nullable=True),\n", + " Column(\"listed_in\", \"VARCHAR(50)\", nullable=True),\n", + " ],\n", + " overwrite_existing=True, # Enabling this will recreate the table if it exists.\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "KG6rwEuJLNIo" + }, + "source": [ + "### Try inserting the documents into the vector table\n", + "\n", + "Now we will create a vector_store object backed by our vector table in the Cloud SQL database. Let's load the data from the documents to the vector table. Note that for each row, the embedding service will be called to compute the embeddings to store in the vector table.\n", + "\n", + "Pricing details can be found [here](https://cloud.google.com/vertex-ai/pricing)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Wo4-7EYCIFF9" + }, + "outputs": [], + "source": [ + "from langchain_google_vertexai import VertexAIEmbeddings\n", + "from langchain_google_cloud_sql_mysql import MySQLVectorStore\n", + "\n", + "# Initialize the embedding service. 
In this case we are using version 003 of Vertex AI's textembedding-gecko model.\n", + "# In general, it is good practice to specify the model version used.\n", + "embeddings_service = VertexAIEmbeddings(\n", + " model_name=\"textembedding-gecko@003\", project=PROJECT_ID\n", + ")\n", + "\n", + "vector_store = MySQLVectorStore(\n", + " engine=mysql_engine,\n", + " embedding_service=embeddings_service,\n", + " table_name=vector_table_name,\n", + " metadata_columns=[\n", + " \"show_id\",\n", + " \"type\",\n", + " \"country\",\n", + " \"date_added\",\n", + " \"release_year\",\n", + " \"duration\",\n", + " \"listed_in\",\n", + " ],\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "fr1rP6KQ-8ag" + }, + "source": [ + "Now let's try to put the documents data into the vector table. Here is a code example to load the first 5 documents in the list." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "CTks8Cy--93B" + }, + "outputs": [], + "source": [ + "import uuid\n", + "\n", + "docs_to_load = documents[:5]\n", + "\n", + "# ! Uncomment the following line to load all 8,800+ documents to the database vector table with calling the embedding service.\n", + "# docs_to_load = documents\n", + "\n", + "ids = [str(uuid.uuid4()) for i in range(len(docs_to_load))]\n", + "vector_store.add_documents(docs_to_load, ids)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "29iztdvfL2BN" + }, + "source": [ + "### Import the rest of your data into your vector table\n", + "\n", + "You don't have to call the embedding service 8,800 times to load all the documents for the demo. Instead, we have prepared a table with the all 8,800+ rows with pre-computed embeddings in a `.sql` file. Again, let's import to our DB using `gcloud` command.\n", + "\n", + "It will restore the `.sql` file to a table with vectors called `vector_netflix_titles`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "FEe9El7QMjHi" + }, + "outputs": [], + "source": [ + "# Import the netflix titles with vector table using gcloud command\n", + "import_command_output = !gcloud sql import sql {INSTANCE_NAME} gs://cloud-samples-data/langchain/cloud-sql/mysql/vector_netflix_titles.sql --database={DATABASE_NAME} --quiet\n", + "\n", + "if \"Imported data\" in str(import_command_output):\n", + " print(import_command_output)\n", + "elif \"already exists\" in str(import_command_output):\n", + " print(\"Did not import because the table already existed.\")\n", + "else:\n", + " raise Exception(f\"The import seems failed:\\n{import_command_output}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ZM_OFzZrQEPs" + }, + "source": [ + "# **Use case 3: Cloud SQL for MySQL as Chat Memory**" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dxqIPQtjDquk" + }, + "source": [ + "Next we will add chat history (called [“memory” in the context of LangChain](https://python.langchain.com/docs/modules/memory/)) to our application so the LLM can retain context and information across multiple interactions, leading to more coherent and sophisticated conversations or text generation. We can use Cloud SQL for MySQL as “memory” storage in our application so that the LLM can use context from prior conversations to better answer the user’s prompts. First let's initialize Cloud SQL for MySQL as memory storage." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "vyYQILyoEAqg" + }, + "outputs": [], + "source": [ + "from langchain_google_cloud_sql_mysql import MySQLChatMessageHistory\n", + "\n", + "message_table_name = \"message_store\"\n", + "\n", + "mysql_engine.init_chat_history_table(table_name=message_table_name)\n", + "\n", + "chat_history = MySQLChatMessageHistory(\n", + " mysql_engine,\n", + " session_id=\"my-test-session\",\n", + " table_name=message_table_name,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "2yuXYLTCl2K1" + }, + "source": [ + "Here is an example of how you would add a user message and how you would add an ai message." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "qDVoTWZal0ZF", + "outputId": "77ac1c76-8f81-4f31-b6f6-1cf3ad104b70" + }, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "[HumanMessage(content='Hi!'),\n", + " AIMessage(content=\"Hello there. I'm a model and am happy to help!\")]" + ] + }, + "metadata": {}, + "execution_count": 14 + } + ], + "source": [ + "chat_history.add_user_message(\"Hi!\")\n", + "chat_history.add_ai_message(\"Hello there. I'm a model and am happy to help!\")\n", + "\n", + "chat_history.messages" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "k0O9mta8RQ0v" + }, + "source": [ + "# **Conversational RAG Chain backed by Cloud SQL for MySQL**" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "j2OxF3JoNA7J" + }, + "source": [ + "So far we've tested with using Cloud SQL for MySQL as document loader, vector store and chat memory. Now let's put it all together with a `ConversationalRetrievalChain`.\n", + "\n", + "We will build a chat bot that can answer movie related questions based on the vector search results.\n", + "\n", + "First let's initialize all of our MySQL engine object to use as a connection in our vector store and chat_history." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "9ukjOO-sNQ8_" + }, + "outputs": [], + "source": [ + "from langchain_google_vertexai import VertexAIEmbeddings, VertexAI\n", + "from langchain.chains import ConversationalRetrievalChain\n", + "from langchain.memory import ConversationSummaryBufferMemory\n", + "from langchain_core.prompts import PromptTemplate\n", + "from langchain_google_cloud_sql_mysql import (\n", + " MySQLChatMessageHistory,\n", + " MySQLEngine,\n", + " MySQLVectorStore,\n", + ")\n", + "\n", + "# Initialize the embedding service\n", + "embeddings_service = VertexAIEmbeddings(\n", + " model_name=\"textembedding-gecko@latest\", project=PROJECT_ID\n", + ")\n", + "\n", + "# Initialize the engine\n", + "mysql_engine = MySQLEngine.from_instance(\n", + " project_id=PROJECT_ID,\n", + " instance=INSTANCE_NAME,\n", + " region=REGION,\n", + " database=DATABASE_NAME,\n", + " user=\"root\",\n", + " password=PASSWORD,\n", + ")\n", + "\n", + "# Initialize the Vector Store\n", + "vector_table_name = \"vector_netflix_titles\"\n", + "vector_store = MySQLVectorStore(\n", + " engine=mysql_engine,\n", + " embedding_service=embeddings_service,\n", + " table_name=vector_table_name,\n", + " metadata_columns=[\n", + " \"show_id\",\n", + " \"type\",\n", + " \"country\",\n", + " \"date_added\",\n", + " \"release_year\",\n", + " \"duration\",\n", + " \"listed_in\",\n", + " ],\n", + ")\n", + "\n", + "# Initialize the MySQLChatMessageHistory\n", + "chat_history = MySQLChatMessageHistory(\n", + " mysql_engine,\n", + " session_id=\"my-test-session\",\n", + " table_name=\"message_store\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Ytlz9D3LmcU7" + }, + "source": [ + "Let's create a prompt for the LLM. Here we can add instructions specific to our application, such as \"Don't make things up\"." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "LoAHNdrWmW9W" + }, + "outputs": [], + "source": [ + "# Prepare some prompt templates for the ConversationalRetrievalChain\n", + "prompt = PromptTemplate(\n", + " template=\"\"\"Use all the information from the context and the conversation history to answer new question. If you see the answer in previous conversation history or the context. \\\n", + "Answer it with clarifying the source information. If you don't see it in the context or the chat history, just say you \\\n", + "didn't find the answer in the given data. Don't make things up.\n", + "\n", + "Previous conversation history from the questioner. \"Human\" was the user who's asking the new question. \"Assistant\" was you as the assistant:\n", + "```{chat_history}\n", + "```\n", + "\n", + "Vector search result of the new question:\n", + "```{context}\n", + "```\n", + "\n", + "New Question:\n", + "```{question}```\n", + "\n", + "Answer:\"\"\",\n", + " input_variables=[\"context\", \"question\", \"chat_history\"],\n", + ")\n", + "condense_question_prompt_passthrough = PromptTemplate(\n", + " template=\"\"\"Repeat the following question:\n", + "{question}\n", + "\"\"\",\n", + " input_variables=[\"question\"],\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "rsGe-bW5m0H1" + }, + "source": [ + "Now let's use our vector store as a retreiver. Retreiver's in Langchain allow us to literally \"retrieve\" documents." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "1nI0xkJamvXt" + }, + "outputs": [], + "source": [ + "# Initialize retriever, llm and memory for the chain\n", + "retriever = vector_store.as_retriever(\n", + " search_type=\"mmr\", search_kwargs={\"k\": 5, \"lambda_mult\": 0.8}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3maZ8SLlneYJ" + }, + "source": [ + "Now let's initialize our LLM, in this case we are using Vertex AI's \"gemini-pro\"." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "VBWhg-ihnnxF" + }, + "outputs": [], + "source": [ + "llm = VertexAI(model_name=\"gemini-pro\", project=PROJECT_ID)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "hN8mpXdtnocg" + }, + "source": [ + "We clear our chat history, so that our application starts without any prior context to other conversations we have had with the application." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "1UkPcEpJno5Y" + }, + "outputs": [], + "source": [ + "chat_history.clear()\n", + "\n", + "memory = ConversationSummaryBufferMemory(\n", + " llm=llm,\n", + " chat_memory=chat_history,\n", + " output_key=\"answer\",\n", + " memory_key=\"chat_history\",\n", + " return_messages=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BDAT2koSn8Mz" + }, + "source": [ + "Now let's create a conversational retrieval chain. This will allow the LLM to use chat history in it's responses, meaning we can ask it follow up questions to our questions instead of having to start from scratch after each inquiry." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "7Fu8fKdEn8h8", + "outputId": "593133a2-424a-4551-f637-1e3391405a2b" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Question: What movie was Brad Pitt in?\n", + "Answer: Brad Pitt was in the following movies: Inglourious Basterds, By the Sea, Killing Them Softly, Babel, War Machine\n", + "\n", + "Question: How about Johnny Depp?\n", + "Answer: Johnny Depp was in the following movies: The Rum Diary, Charlie and the Chocolate Factory, The Tourist, The Imaginarium of Doctor Parnassus, What's Eating Gilbert Grape. (Source: vector search result)\n", + "\n", + "Question: Are there movies about animals?\n", + "Answer: Yes, from the vector search result, there are the following movies that feature animals:\n", + "\n", + "- Animals on the Loose: A You vs. Wild Movie\n", + "- Rango\n", + "- Kung Fu Panda: Secrets of the Scroll\n", + "- Balto\n", + "- Cats & Dogs: The Revenge of Kitty Galore\n", + "\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "[HumanMessage(content='What movie was Brad Pitt in?'),\n", + " AIMessage(content='Brad Pitt was in the following movies: Inglourious Basterds, By the Sea, Killing Them Softly, Babel, War Machine'),\n", + " HumanMessage(content='How about Johnny Depp?'),\n", + " AIMessage(content=\"Johnny Depp was in the following movies: The Rum Diary, Charlie and the Chocolate Factory, The Tourist, The Imaginarium of Doctor Parnassus, What's Eating Gilbert Grape. 
(Source: vector search result)\"),\n", + " HumanMessage(content='Are there movies about animals?'),\n", + " AIMessage(content='Yes, from the vector search result, there are the following movies that feature animals:\\n\\n- Animals on the Loose: A You vs. Wild Movie\\n- Rango\\n- Kung Fu Panda: Secrets of the Scroll\\n- Balto\\n- Cats & Dogs: The Revenge of Kitty Galore')]" + ] + }, + "metadata": {}, + "execution_count": 20 + } + ], + "source": [ + "# create the ConversationalRetrievalChain\n", + "rag_chain = ConversationalRetrievalChain.from_llm(\n", + " llm=llm,\n", + " retriever=retriever,\n", + " verbose=False,\n", + " memory=memory,\n", + " condense_question_prompt=condense_question_prompt_passthrough,\n", + " combine_docs_chain_kwargs={\"prompt\": prompt},\n", + ")\n", + "\n", + "# ask some questions\n", + "q = \"What movie was Brad Pitt in?\"\n", + "ans = rag_chain({\"question\": q, \"chat_history\": chat_history})[\"answer\"]\n", + "print(f\"Question: {q}\\nAnswer: {ans}\\n\")\n", + "\n", + "q = \"How about Johnny Depp?\"\n", + "ans = rag_chain({\"question\": q, \"chat_history\": chat_history})[\"answer\"]\n", + "print(f\"Question: {q}\\nAnswer: {ans}\\n\")\n", + "\n", + "q = \"Are there movies about animals?\"\n", + "ans = rag_chain({\"question\": q, \"chat_history\": chat_history})[\"answer\"]\n", + "print(f\"Question: {q}\\nAnswer: {ans}\\n\")\n", + "\n", + "# browser the chat history\n", + "chat_history.messages" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From ad32137c838483fd051a5187e3df682b89cdc348 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 16:11:43 -0700 Subject: [PATCH 23/23] chore(main): release 0.2.0 (#47) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 19 +++++++++++++++++++ .../version.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 94625e6..0100495 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.2.0](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/compare/v0.1.0...v0.2.0) (2024-04-08) + + +### Features + +* Add index types for vector search ([#55](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/55)) ([2e30b48](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/2e30b48ad2d1fb11f5f8964808ed5143d9231084)) +* Add MySQLVectorStore initialization methods ([#52](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/52)) ([a1c9411](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/a1c941149e1f1b33991b997e5236c4a7971058fd)) +* Adding search functions and tests ([#56](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/56)) ([5b80694](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/5b806947e5c827ebca553a68ff74a14c7d22a6a5)) 
+* **ci:** Run tests against multiple versions ([#51](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/51)) ([3439c9d](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/3439c9d6a277a95da835f1c59d4727855a187dee)) +* Support add and delete from MySQLVectorStore ([#53](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/53)) ([ce45617](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/ce45617ae6c9f1b6e539c31e4bcdd47aa7daf964)) + + +### Documentation + +* Add basic MySQLVectorStore usage to README ([#58](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/58)) ([e871c2b](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/e871c2b503fb0d056d7e374394db36e44dcda4c2)) +* Add end-to-end MySQL quickstart ([#61](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/61)) ([388f5a4](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/388f5a4e6b76d23c1e683029c5ea034cfe84bbf7)) +* Add github links ([#46](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/46)) ([54fbab5](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/54fbab5fd41e7b49a2d5da800afad5d3fb66b40c)) +* Add MySQLVectorStore reference notebook ([#59](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/issues/59)) ([0ece837](https://github.com/googleapis/langchain-google-cloud-sql-mysql-python/commit/0ece837e98ff60512d26b5c7c8fb4803e056ad3c)) + ## 0.1.0 (2024-02-22) diff --git a/src/langchain_google_cloud_sql_mysql/version.py b/src/langchain_google_cloud_sql_mysql/version.py index c1c8212..20c5861 100644 --- a/src/langchain_google_cloud_sql_mysql/version.py +++ b/src/langchain_google_cloud_sql_mysql/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.1.0" +__version__ = "0.2.0"
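As a closing note, the version bump above corresponds to the 0.2.0 release recorded in the changelog. One way to confirm which release is installed, using only the standard library (the expected value is an assumption based on this diff):

```python
from importlib.metadata import version

# After `pip install --upgrade langchain-google-cloud-sql-mysql`,
# this should report the release cut by this change.
print(version("langchain-google-cloud-sql-mysql"))  # expected: 0.2.0
```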