Skip to content

Instantly share code, notes, and snippets.

@oguzalb
Created April 16, 2021 18:17
Show Gist options
  • Save oguzalb/411693ab98bf086432c0a4af74af69c3 to your computer and use it in GitHub Desktop.
import httpx

# Fetch a list of Rumi quotes from the public philosophy-quotes API.
response = httpx.get(
    "https://philosophy-quotes-api.glitch.me/quotes/author/Rumi"
)
# `quotes` is reused at the bottom of the script to render a template.
quotes = response.json()
print(quotes)
from string import ascii_letters
# Characters allowed inside a template variable name: ASCII letters plus "_".
name_letters = ascii_letters + "_"
# (input text, expected parsed name) cases for tokenize_name below.
NAME_CASES = [("some_name", "some_name"), ("some ", "some"), ("some}", "some")]
def tokenize_name(text, i):
    """Consume a run of name characters in *text* starting at index *i*.

    Returns the parsed name (possibly empty) and the index of the first
    character that is not part of the name.
    """
    name = ""
    # error if name == ""
    while i < len(text) and text[i] in name_letters:
        name += text[i]
        i += 1
    return name, i
# Sanity-check tokenize_name against the expected outputs.
for case_text, expected in NAME_CASES:
    parsed, _end = tokenize_name(case_text, 0)
    assert parsed == expected
from typing import NamedTuple
class Replacement(NamedTuple):
    """A template token that should be substituted with a value."""
    value: str
def tokenize_replacement(text, i):
    """Parse a "{name}" replacement token starting at text[i].

    *text[i]* is expected to be the opening "{".  Returns a Replacement
    holding the variable name and the index just past the closing "}".

    Raises ValueError if the closing "}" is missing.
    """
    i += 1  # skip the opening "{"
    name, i = tokenize_name(text, i)
    # Fix: an unterminated token such as "{name" at end of input used to
    # be accepted silently (the check only ran when i < len(text)); it now
    # raises the same error as any other missing "}".
    if i >= len(text) or text[i] != "}":
        raise ValueError("expected }")
    i += 1  # skip the closing "}"
    return Replacement(name), i
# (template text, expected token) cases for tokenize_replacement.
REPLACEMENT_CASES = [
    ("{some_name}", Replacement("some_name")),
    ("{some} ", Replacement("some")),
]
for case_text, expected in REPLACEMENT_CASES:
    token, _end = tokenize_replacement(case_text, 0)
    assert token == expected, (token, expected)
class Other(NamedTuple):
    """A literal chunk of template text, emitted unchanged by render()."""
    value: str
def tokenize_other(text, i):
    """Consume literal text up to (but not including) the next "{".

    Returns an Other token with the consumed text and the stopping index.
    """
    start = i
    while i < len(text) and text[i] != "{":
        i += 1
    return Other(text[start:i]), i
# (input text, expected token) cases for tokenize_other.
OTHER_CASES = [("some_name{", Other("some_name")), ("some", Other("some"))]
for case_text, expected in OTHER_CASES:
    token, _end = tokenize_other(case_text, 0)
    assert token == expected
def tokenize(text, i):
    """Split *text* (from index *i*) into Replacement and Other tokens.

    Returns the token list and the final index (len(text)).
    """
    tokens = []
    while i < len(text):
        # A "{" starts a replacement token; anything else is literal text.
        if text[i] == "{":
            token, i = tokenize_replacement(text, i)
        else:
            token, i = tokenize_other(text, i)
        tokens.append(token)
    return tokens, i
# End-to-end tokenizer cases: full template -> expected token stream.
FULL_CASES = [
    (
        " {some_name} ",
        [Other(" "), Replacement("some_name"), Other(" ")],
    ),
]
for case_text, expected in FULL_CASES:
    tokens, _end = tokenize(case_text, 0)
    assert list(tokens) == expected
# Demo template: "{name}" is substituted by render() below.
template1 = """
something {name} something
"""
def render(template, values):
    """Render *template*, substituting each {name} token from *values*.

    Raises KeyError if a template name is missing from *values*.
    """
    tokens, _ = tokenize(template, 0)
    parts = []
    for token in tokens:
        if isinstance(token, Other):
            parts.append(token.value)
        else:
            parts.append(values[token.value])
    return "".join(parts)
# Render the demo template with a literal value.
print(render(template1, {"name": "hohoho"}))
# Template for one quote object fetched from the API.
# NOTE(review): assumes each quote dict has "quote" and "source" keys —
# confirm against the API response printed near the top of the script.
template_quote = """
quote: {quote}
source: {source}
"""
print(render(template_quote, quotes[0]))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment