Changeset - a84a39f18784 (branch: default) [Not reviewed]
Dennis Fink <dennis.fink@c3l.lu> - 2022-03-21 17:39:50
Add books
5 files changed with 338 insertions and 12 deletions:
0 comments (0 inline, 0 general)
poetry.lock
[[package]]
name = "appdirs"
version = "1.4.4"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "attrs"
version = "21.4.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]

[[package]]
name = "beautifulsoup4"
version = "4.10.0"
description = "Screen-scraping library"
category = "main"
optional = false
python-versions = ">3.0.0"

[package.dependencies]
soupsieve = ">1.2"

[package.extras]
html5lib = ["html5lib"]
lxml = ["lxml"]

[[package]]
name = "black"
version = "22.1.0"
@@ -20,6 +57,17 @@ jupyter = ["ipython (>=7.8.0)", "tokeniz
uvloop = ["uvloop (>=0.15.2)"]

[[package]]
name = "cattrs"
version = "1.10.0"
description = "Composable complex class support for attrs and dataclasses."
category = "main"
optional = false
python-versions = ">=3.7,<4.0"

[package.dependencies]
attrs = ">=20"

[[package]]
name = "certifi"
version = "2021.10.8"
description = "Python package for providing Mozilla's CA Bundle."
@@ -77,6 +125,27 @@ optional = false
python-versions = ">=3.5"

[[package]]
name = "isbnlib"
version = "3.10.10"
description = "Extract, clean, transform, hyphenate and metadata for ISBNs (International Standard Book Number)."
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "isbnlib-worldcat2"
version = "0.1.2"
description = "An isbnlib plugin for the WorldCat service (https://www.worldcat.org/)."
category = "main"
optional = false
python-versions = "*"

[package.dependencies]
beautifulsoup4 = ">=4.7.1"
isbnlib = ">=3.9.1"
pycountry = ">=1.12.8"

[[package]]
name = "isort"
version = "5.10.1"
description = "A Python utility / library to sort Python imports."
@@ -136,6 +205,14 @@ docs = ["Sphinx (>=4)", "furo (>=2021.7.
test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]

[[package]]
name = "pycountry"
version = "22.3.5"
description = "ISO country, subdivision, language, currency and script definitions and their translations"
category = "main"
optional = false
python-versions = ">=3.6, <4"

[[package]]
name = "pygments"
version = "2.11.2"
description = "Pygments is a syntax highlighting package written in Python."
@@ -162,6 +239,33 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "w
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]

[[package]]
name = "requests-cache"
version = "0.9.3"
description = "A transparent persistent cache for the requests library"
category = "main"
optional = false
python-versions = ">=3.7,<4.0"

[package.dependencies]
appdirs = ">=1.4.4,<2.0.0"
attrs = ">=21.2,<22.0"
cattrs = ">=1.8,<2.0"
requests = ">=2.22,<3.0"
url-normalize = ">=1.4,<2.0"
urllib3 = ">=1.25.5,<2.0.0"

[package.extras]
dynamodb = ["boto3 (>=1.15,<2.0)", "botocore (>=1.18,<2.0)"]
all = ["boto3 (>=1.15,<2.0)", "botocore (>=1.18,<2.0)", "pymongo (>=3,<5)", "redis (>=3,<5)", "itsdangerous (>=2.0,<3.0)", "pyyaml (>=5.4)", "ujson (>=4.0)"]
mongodb = ["pymongo (>=3,<5)"]
redis = ["redis (>=3,<5)"]
bson = ["bson (>=0.5)"]
security = ["itsdangerous (>=2.0,<3.0)"]
yaml = ["pyyaml (>=5.4)"]
json = ["ujson (>=4.0)"]
docs = ["furo (>=2021.9.8)", "linkify-it-py (>=1.0.1,<2.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx (==4.3.0)", "sphinx-autodoc-typehints (>=1.11,<2.0)", "sphinx-automodapi (>=0.13,<0.15)", "sphinx-copybutton (>=0.3,<0.5)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinx-notfound-page", "sphinx-panels (>=0.6,<0.7)", "sphinxcontrib-apidoc (>=0.3,<0.4)"]

[[package]]
name = "rich"
version = "11.2.0"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
@@ -178,6 +282,22 @@ pygments = ">=2.6.0,<3.0.0"
jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]

[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "soupsieve"
version = "2.3.1"
description = "A modern CSS selector implementation for Beautiful Soup."
category = "main"
optional = false
python-versions = ">=3.6"

[[package]]
name = "tomli"
version = "2.0.0"
description = "A lil' TOML parser"
@@ -194,6 +314,17 @@ optional = false
python-versions = ">=3.6"

[[package]]
name = "url-normalize"
version = "1.4.3"
description = "URL normalization for Python"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"

[package.dependencies]
six = "*"

[[package]]
name = "urllib3"
version = "1.26.8"
description = "HTTP library with thread-safe connection pooling, file post, and more."
@@ -217,9 +348,21 @@ python-versions = ">=2.7"
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "e73abda5748e52a1074f136fa035a8e427d57dd92ba4983c81141974a97dc25e"
content-hash = "505b76babe9d3e271acda50f879b483fe1e75bbc34e79d498f83efe2f02db178"

[metadata.files]
appdirs = [
    {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
    {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
attrs = [
    {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
    {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
beautifulsoup4 = [
    {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"},
    {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"},
]
black = [
    {file = "black-22.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1297c63b9e1b96a3d0da2d85d11cd9bf8664251fd69ddac068b98dc4f34f73b6"},
    {file = "black-22.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2ff96450d3ad9ea499fc4c60e425a1439c2120cbbc1ab959ff20f7c76ec7e866"},
@@ -245,6 +388,10 @@ black = [
    {file = "black-22.1.0-py3-none-any.whl", hash = "sha256:3524739d76b6b3ed1132422bf9d82123cd1705086723bc3e235ca39fd21c667d"},
    {file = "black-22.1.0.tar.gz", hash = "sha256:a7c0192d35635f6fc1174be575cb7915e92e5dd629ee79fdaf0dcfa41a80afb5"},
]
cattrs = [
    {file = "cattrs-1.10.0-py3-none-any.whl", hash = "sha256:35dd9063244263e63bd0bd24ea61e3015b00272cead084b2c40d788b0f857c46"},
    {file = "cattrs-1.10.0.tar.gz", hash = "sha256:211800f725cdecedcbcf4c753bbd22d248312b37d130f06045434acb7d9b34e1"},
]
certifi = [
    {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
    {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
@@ -269,6 +416,14 @@ idna = [
    {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
    {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
isbnlib = [
    {file = "isbnlib-3.10.10-py2.py3-none-any.whl", hash = "sha256:623a09329e8ec7049edf15dd412db042bf4f8236a428bf7a22d84a125584f52d"},
    {file = "isbnlib-3.10.10.tar.gz", hash = "sha256:c9e6c1dcaa9dff195429373cf2beb3117f30b3fca43d7db5aec5a2d1f6f59784"},
]
isbnlib-worldcat2 = [
    {file = "isbnlib-worldcat2-0.1.2.tar.gz", hash = "sha256:fd05266a6d58ecb13bea8b9c69e3c4d6871708aa38f79e869a31ce909943ef14"},
    {file = "isbnlib_worldcat2-0.1.2-py3-none-any.whl", hash = "sha256:296a4a5d46eb4eab201b5a218a03354d6208ab52d62a563ae39e940c71c1ced7"},
]
isort = [
    {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
    {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
@@ -307,6 +462,9 @@ platformdirs = [
    {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"},
    {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"},
]
pycountry = [
    {file = "pycountry-22.3.5.tar.gz", hash = "sha256:b2163a246c585894d808f18783e19137cb70a0c18fb36748dc01fc6f109c1646"},
]
pygments = [
    {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"},
    {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"},
@@ -315,10 +473,22 @@ requests = [
    {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
    {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
]
requests-cache = [
    {file = "requests-cache-0.9.3.tar.gz", hash = "sha256:b32f8afba2439e1b3e12cba511c8f579271eff827f063210d62f9efa5bed6564"},
    {file = "requests_cache-0.9.3-py3-none-any.whl", hash = "sha256:d8b32405b2725906aa09810f4796e54cc03029de269381b404c426bae927bada"},
]
rich = [
    {file = "rich-11.2.0-py3-none-any.whl", hash = "sha256:d5f49ad91fb343efcae45a2b2df04a9755e863e50413623ab8c9e74f05aee52b"},
    {file = "rich-11.2.0.tar.gz", hash = "sha256:1a6266a5738115017bb64a66c59c717e7aa047b3ae49a011ede4abdeffc6536e"},
]
six = [
    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
soupsieve = [
    {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"},
    {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"},
]
tomli = [
    {file = "tomli-2.0.0-py3-none-any.whl", hash = "sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224"},
    {file = "tomli-2.0.0.tar.gz", hash = "sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1"},
@@ -327,6 +497,10 @@ typing-extensions = [
    {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"},
    {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"},
]
url-normalize = [
    {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"},
    {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"},
]
urllib3 = [
    {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"},
    {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"},
pyproject.toml
@@ -9,6 +9,9 @@ python = "^3.10"
requests = "^2.27.1"
click = "^8.0.3"
rich = "^11.2.0"
isbnlib = "^3.10.10"
isbnlib-worldcat2 = "^0.1.2"
requests-cache = "^0.9.3"

[tool.poetry.dev-dependencies]
mypy = "^0.931"
stockcli/books.py
new file 100644
import logging

import isbnlib
from rich.panel import Panel
from rich.table import Table

from . import utils
from .console import DEFAULT_PADDING, console, error_console, int_prompt, prompt
from .style import GreenBoldText


def add_book_by_barcode(barcode: str) -> None:

    canonical_isbn = isbnlib.canonical(barcode)
    if not (isbnlib.is_isbn10(canonical_isbn) or isbnlib.is_isbn13(canonical_isbn)):
        logging.error(f"{barcode} is not a valid ISBN!")
        error_console.print(f"{barcode} is not a valid ISBN!")
        return

    userentity = utils.get_request("objects/userentities?query[]=name=books")
    userentity_id = userentity[0]["id"]

    all_books = utils.get_request(
        f"objects/userobjects?query[]=userentity_id={userentity_id}"
    )

    # Scan every existing book; cached responses are fine for this read-only pass.
    for book in all_books:
        book_metadata = utils.get_request(
            f"userfields/userentity-books/{book['id']}", cached=True
        )
        if isbnlib.canonical(book_metadata["isbn"]) == canonical_isbn:

            # Re-read the record without the cache before showing and updating it.
            book_metadata = utils.get_request(
                f"userfields/userentity-books/{book['id']}", cached=False
            )

            grid = Table.grid(padding=DEFAULT_PADDING)
            grid.add_column(justify="right", no_wrap=True)
            grid.add_column(justify="left", style="cyan", no_wrap=True)
            grid.add_row(GreenBoldText("Title:"), book_metadata["title"])
            grid.add_row(GreenBoldText("Amount:"), book_metadata["amount"])
            console.print(
                Panel(grid, title="[green bold]Book already found[/green bold]")
            )

            add_to_amount = bool(
                int_prompt.ask("Add? (Enter 0 to abort)", choices=["0", "1"], default=0)
            )
            if not add_to_amount:
                logging.debug("User aborted task!")
                return
            else:
                book_metadata["amount"] = str(int(book_metadata["amount"]) + 1)
                response = utils.put_request(
                    f"userfields/userentity-books/{book['id']}",
                    book_metadata,
                    cached=True,
                )
                console.print("Successfully updated!")
                return

    metadata = isbnlib.meta(canonical_isbn, "worldcat")

    grid = Table.grid(padding=DEFAULT_PADDING)
    grid.add_column(justify="right", no_wrap=True)
    grid.add_column(justify="left", style="cyan", no_wrap=True)
    grid.add_row(GreenBoldText("Title:"), metadata["Title"])
    grid.add_row(GreenBoldText("Author(s)"), ", ".join(metadata["Authors"]))
    console.print(Panel(grid, title="[green bold]Book Info[/green bold]"))

    ok = bool(
        int_prompt.ask(
            "Is the metadata correct? (Enter 0 to abort)", choices=["0", "1"], default=0
        )
    )
    if not ok:
        logging.debug("User aborted task!")
        return
    new_book_id = utils.post_request(
        "objects/userobjects", {"userentity_id": userentity_id}
    )["created_object_id"]

    response = utils.put_request(
        f"userfields/userentity-books/{new_book_id}",
        {
            "title": metadata["Title"],
            "isbn": isbnlib.mask(canonical_isbn),
            "authors": "\n".join(metadata["Authors"]),
            "amount": "1",
            "categories": None,
        },
    )
    console.print("Successfully added!")
    return


def update_isbn(barcode: str) -> None:
    userentity = utils.get_request("objects/userentities?query[]=name=books")
    userentity_id = userentity[0]["id"]

    all_books = utils.get_request(
        f"objects/userobjects?query[]=userentity_id={userentity_id}"
    )

    for book in all_books:
        book_metadata = utils.get_request(
            f"userfields/userentity-books/{book['id']}", cached=False
        )
        console.print("Handling")
        console.print(book_metadata)
        try:
            book_metadata["isbn"] = isbnlib.mask(
                isbnlib.canonical(book_metadata["isbn"])
            )
            response = utils.put_request(
                f"userfields/userentity-books/{book['id']}",
                book_metadata,
            )
        except Exception:
            # Skip records whose ISBN cannot be canonicalised or written back.
            continue
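
The duplicate check in add_book_by_barcode compares canonical forms, so the hyphenated ISBN written back via isbnlib.mask() still matches a raw scanner barcode. A small illustration (both values are examples, not from the changeset):

import isbnlib

stored = "978-3-16-148410-0"   # hyphenated, as produced by isbnlib.mask()
scanned = "9783161484100"      # raw digits from the barcode scanner

# canonical() reduces both to the same 13-digit string
assert isbnlib.canonical(stored) == isbnlib.canonical(scanned)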
stockcli/cli.py
import json
import logging
import logging.config
from datetime import timedelta
from operator import itemgetter

import click
import requests
import requests_cache
from rich.panel import Panel
from rich.table import Table

from .books import add_book_by_barcode, update_isbn
from .console import DEFAULT_PADDING, console, int_prompt, prompt
from .stock import (
    add_by_barcode,
@@ -22,6 +25,7 @@ TASK_MAP = {
    "2": ("Add stock", add_by_barcode),
    "3": ("Update stock", update_by_barcode),
    "4": ("Get product info", get_info_by_barcode),
    "5": ("Add book", add_book_by_barcode),
}


@@ -48,6 +52,22 @@ def stockcli(ctx: click.Context, configf
            "GROCY-API-KEY": config["grocy"]["apikey"],
        }
    )
    ctx.obj["cached_session"] = requests_cache.CachedSession(
        "stockcli",
        ignored_parameters=["GROCY-API-KEY"],
        expire_after=timedelta(days=1),
        cache_control=True,
        allowable_methods=["GET", "POST"],
        allowable_codes=[200, 400],
        match_headers=True,
        stale_if_error=True,
    )
    ctx.obj["cached_session"].headers.update(
        {
            "accept": "application/json",
            "GROCY-API-KEY": config["grocy"]["apikey"],
        }
    )
    ctx.obj["base_url"] = f"{config['grocy']['url']}/api/"

    menu = Table.grid(padding=DEFAULT_PADDING)
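
The CachedSession above keeps Grocy GET/POST responses for a day, honours Cache-Control headers, serves stale entries on errors, and keeps GROCY-API-KEY out of the cache key. A minimal standalone sketch of the same requests-cache pattern; the cache name and URL here are placeholders, not values from this changeset:

from datetime import timedelta

import requests_cache

session = requests_cache.CachedSession(
    "demo_cache",                          # SQLite cache file (placeholder name)
    ignored_parameters=["GROCY-API-KEY"],  # exclude the API key from cache keys
    expire_after=timedelta(days=1),
    allowable_methods=["GET", "POST"],
)

first = session.get("https://grocy.example/api/objects/products")
second = session.get("https://grocy.example/api/objects/products")
print(second.from_cache)                   # True once the first response is cached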
stockcli/utils.py
@@ -11,9 +11,15 @@ from .console import error_console
UNALLOWED_CHARACTERS = str.maketrans(dict((c, None) for c in string.whitespace))


def make_request(method: str, url_path: str, data: Optional[Any] = None) -> Any:
def make_request(
    method: str, url_path: str, data: Optional[Any] = None, *, cached: bool = False
) -> Any:
    obj = click.get_current_context().obj
    session = obj["request_session"]

    if cached:
        session = obj["cached_session"]
    else:
        session = obj["request_session"]
    base_url = obj["base_url"]
    requested_url = base_url + url_path

@@ -49,19 +55,22 @@ def make_request(method: str, url_path:
        error_console.print("Too many redirects!")
        raise
    else:
        return response.json()


def get_request(url_path: str) -> Any:
    return make_request("get", url_path)
        try:
            return response.json()
        except requests.JSONDecodeError:
            return response


def post_request(url_path: str, data: Dict[str, Any]) -> Any:
    return make_request("post", url_path, data)
def get_request(url_path: str, *, cached: bool = False) -> Any:
    return make_request("get", url_path, cached=cached)


def put_request(url_path: str, data: Dict[str, Any]) -> Any:
    return make_request("put", url_path, data)
def post_request(url_path: str, data: Dict[str, Any], *, cached: bool = False) -> Any:
    return make_request("post", url_path, data, cached=cached)


def put_request(url_path: str, data: Dict[str, Any], *, cached: bool = False) -> Any:
    return make_request("put", url_path, data, cached=cached)


def prepare_barcode(barcode: str) -> str:
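
With the cached keyword threaded through make_request, each caller decides whether a response may come from the day-old cache; books.py scans with cached=True and re-reads without the cache before writing. A condensed sketch of that usage, assuming it runs inside the click context set up in cli.py (the object id 42 is a placeholder):

from stockcli import utils

# bulk scan: a cached copy of the book record is good enough here
meta = utils.get_request("userfields/userentity-books/42", cached=True)

# fresh read immediately before mutating, then write through the live session
meta = utils.get_request("userfields/userentity-books/42")
meta["amount"] = str(int(meta["amount"]) + 1)
utils.put_request("userfields/userentity-books/42", meta)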