
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.
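In Git terms this roughly corresponds to a three-dot (merge-base) diff: the changes the source branch would introduce into the target, not a plain tip-to-tip comparison. A minimal local sketch, assuming the target branch is master and the source branch is feat/instagram-feed (the actual selections are not recorded on this page):

    # Hypothetical equivalent of this compare view: diff the source branch
    # against its merge base with the target, rather than against the target tip.
    git fetch origin
    git diff origin/master...origin/feat/instagram-feed --stat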

Source

Select Git revision
  • feat-more-blocks
  • feat-rework-election-page
  • feat/custom-css
  • feat/dary-improvements
  • feat/geo-feature-collections
  • feat/hideable-tweets
  • feat/instagram-feed
  • feat/people-octopus-imports
  • feat/pirstan-changes
  • feat/redesign-fixes-3
  • feat/redesign-improvements-10
  • feat/redesign-improvements-8
  • feat/separate-import-thread
  • feature/crypto-widget
  • features/add-custom-numbering-for-candidates
  • features/add-dynamic-candidate-numbers
  • features/add-embed-to-articles
  • features/add-feature-enlarging-sub-block
  • features/add-link-to-images
  • features/add-pdf-page
  • features/add-redirects
  • features/add-thumbnail-principle-to-uniweb-and-senate
  • features/add-timeline
  • features/add-typed-table
  • features/create-collapsible-extra-legal-info
  • features/create-mastodon-feed-block
  • features/create-wordcloud-from-article-page
  • features/donation-panel-should-be-optional
  • features/extend-hero-banner
  • features/fix-broken-calendar-categories
  • master
  • test

Target

Select target project
  • to/majak
  • b1242/majak
Showing 445 additions and 350 deletions
......@@ -83,7 +83,7 @@
{% include "main/includes/newsletter_section.html" %}
</div>
</main>
<script type="text/javascript">
async function showMoreArticles(event, btn) {
event.preventDefault()
......
......@@ -55,55 +55,51 @@
</div>
</section>
</div>
{% if tweet_list %}
<section class="grid-container no-max mr-0 person-twitter-section mb-4 xl:mb-20">
{% if instagram_post_list %}
<section class="grid-container no-max mr-0 person-instagram-section mb-4 xl:mb-20">
<div class="grid-content-with-right-side">
<h2 class="head-4xl text-left">
Aktuálně na Twitteru
Aktuálně na Instagramu
</h2>
<div class="__js-root twitter-carousel-root xl:max-w-[1145px]">
<ui-twitter-carousel>
{% for tweet in tweet_list %}
<div class="flex max-w-sm max-w-xs w-full h-full">
<div class="__js-root instagram-carousel-root xl:max-w-[1145px]">
<ui-instagram-carousel>
{% for post in instagram_post_list %}
<div class="flex max-w-sm max-w-xs w-full h-[20rem]">
<a
href="https://twitter.com/{{ tweet.author_username }}"
class="group mb-5 h-full w-full flex flex-col align-center overflow-hidden text-center border border-grey-100 relative sm:mb-0 hover:no-underline"
href="{{ post.url }}"
class="group h-full w-full flex flex-col align-center overflow-hidden text-center border border-grey-100 relative hover:no-underline"
>
<div class="md:min-h-[21rem] p-4{% if tweet.image %} opacity-0 group-focus:opacity-100 group-hover:opacity-100 duration-150 z-10{% endif %}">
<div class="md:min-h-[20rem] p-4 opacity-0 group-focus:opacity-100 group-hover:opacity-100 duration-150 z-10">
<div class="flex flex-col items-center">
<div class="mb-4 flex items-center justify-between xl:flex-col gap-3">
<img
class="rounded-full shadow-sm w-12"
src="{{ tweet.author_img.url }}"
alt="Profilový obrázek"
>
<div class="flex flex-col">
<h5 class="font-alt text-xl mb-1 text-left sm:text-center">
{{ tweet.author_name }}
<h5 class="font-alt text-xl mt-3 mb-1 text-left sm:text-center">
{{ post.author_name }}
</h5>
<small class="text-turquoise-400 text-left sm:text-center">
{{ tweet.author_username }}
<small class="text-brands-instagram text-left sm:text-center">
@{{ post.author_username }}
</small>
</div>
</div>
<p class="text-small sm:text-base leading-6 mb-2">
{{ tweet.text|truncatechars:240 }}
{{ post.caption }}
</p>
</div>
</div>
<div class="flex-shrink-0 h-10 mt-auto">
<i class="ico--twitter text-turquoise-400 text-3xl sm:text-xl"></i>
</div>
<div class="absolute inset-0 flex-shrink-0 z-0 duration-150 group-focus:blur-lg group-focus:opacity-25 group-hover:blur-lg group-hover:opacity-25">
<div class="relative">
<div class="absolute left-4 top-4 bg-white rounded-lg p-1.5 drop-shadow-md">
<i class="ico--instagram text-brands-instagram text-2xl"></i>
</div>
{% if tweet.image %}
<div class="absolute inset-0 flex-shrink-0 z-0 duration-150 group-focus:blur-lg group-focus:opacity-25 group-hover:blur-lg group-hover:opacity-25">
<img src="{{ tweet.image.url }}"
class="tweet-image"
alt="Obrázek Tweetu"
<img
class="h-[20rem] object-cover"
src="{{ post.image.url }}"
alt="Obrázek v Instagramovém postu, popis „{{ post.caption }}“"
>
</div>
{% endif %}
</div>
</a>
</div>
{% endfor %}
......
......@@ -47,7 +47,7 @@ INSTALLED_APPS = [
"calendar_utils",
"maps_utils",
"redmine_utils",
"twitter_utils",
"instagram_utils",
"users",
"pirates",
"tuning",
......@@ -311,3 +311,6 @@ MAPS_UTILS_MAPPROXY_URL = env.str(
)
TWITTER_BEARER_TOKEN = env.str("TWITTER_BEARER_TOKEN", default="")
INSTAGRAM_APP_ID = env.str("INSTAGRAM_APP_ID", default="")
INSTAGRAM_APP_SECRET = env.str("INSTAGRAM_APP_SECRET", default="")
wagtail
wagtail-metadata
wagtail-trash
django-environ
django-environ<0.10.0
django-extensions
django-redis
django-settings-export
......@@ -12,7 +12,8 @@ pirates<=0.7
whitenoise
opencv-python
requests
icalevnt
requests-cache
icalevents
ics
arrow
sentry-sdk
......@@ -26,3 +27,4 @@ pypdf2
pyyaml
fastjsonschema
tweepy
requests-cache
......@@ -6,7 +6,7 @@
#
amqp==5.1.1
# via kombu
anyascii==0.3.1
anyascii==0.3.2
# via wagtail
appnope==0.1.3
# via ipython
......@@ -21,7 +21,10 @@ asttokens==2.2.1
async-timeout==4.0.2
# via redis
attrs==22.2.0
# via ics
# via
# cattrs
# ics
# requests-cache
backcall==0.2.0
# via ipython
beautifulsoup4==4.11.2
......@@ -34,6 +37,8 @@ bleach==6.0.0
# via -r base.in
brotli==1.0.9
# via fonttools
cattrs==22.2.0
# via requests-cache
celery==5.2.7
# via -r base.in
certifi==2022.12.7
......@@ -44,7 +49,7 @@ cffi==1.15.1
# via
# cryptography
# weasyprint
charset-normalizer==3.0.1
charset-normalizer==3.1.0
# via requests
click==8.1.3
# via
......@@ -58,18 +63,18 @@ click-plugins==1.1.1
# via celery
click-repl==0.2.0
# via celery
cryptography==39.0.1
cryptography==40.0.1
# via
# josepy
# mozilla-django-oidc
# pyopenssl
cssselect2==0.7.0
# via weasyprint
datetime==4.3
# via icalevnt
datetime==4.9
# via icalevents
decorator==5.1.1
# via ipython
django==4.1.6
django==4.1.8
# via
# django-extensions
# django-filter
......@@ -114,27 +119,29 @@ draftjs-exporter==2.1.7
# via wagtail
et-xmlfile==1.1.0
# via openpyxl
exceptiongroup==1.1.1
# via cattrs
executing==1.2.0
# via stack-data
fastjsonschema==2.16.2
fastjsonschema==2.16.3
# via -r base.in
fonttools[woff]==4.38.0
fonttools[woff]==4.39.3
# via weasyprint
html5lib==1.1
# via
# wagtail
# weasyprint
httplib2==0.20.1
# via icalevnt
icalendar==4.0.8
# via icalevnt
icalevnt==0.1.26
httplib2==0.20.4
# via icalevents
icalendar==4.0.9
# via icalevents
icalevents==0.1.27
# via -r base.in
ics==0.7.2
# via -r base.in
idna==3.4
# via requests
ipython==8.9.0
ipython==8.12.0
# via -r base.in
jedi==0.18.2
# via ipython
......@@ -144,7 +151,7 @@ kombu==5.2.4
# via celery
l18n==2021.3
# via wagtail
markdown==3.4.1
markdown==3.4.3
# via -r base.in
matplotlib-inline==0.1.6
# via ipython
......@@ -156,9 +163,9 @@ oauthlib==3.2.2
# via
# requests-oauthlib
# tweepy
opencv-python==4.7.0.68
opencv-python==4.7.0.72
# via -r base.in
openpyxl==3.1.0
openpyxl==3.1.2
# via wagtail
parso==0.8.3
# via jedi
......@@ -166,18 +173,20 @@ pexpect==4.8.0
# via ipython
pickleshare==0.7.5
# via ipython
pillow==9.4.0
pillow==9.5.0
# via
# django-simple-captcha
# wagtail
# weasyprint
pirates==0.6.0
# via -r base.in
prompt-toolkit==3.0.36
platformdirs==3.2.0
# via requests-cache
prompt-toolkit==3.0.38
# via
# click-repl
# ipython
psycopg2-binary==2.9.5
psycopg2-binary==2.9.6
# via -r base.in
ptyprocess==0.7.0
# via pexpect
......@@ -185,23 +194,23 @@ pure-eval==0.2.2
# via stack-data
pycparser==2.21
# via cffi
pydyf==0.5.0
pydyf==0.6.0
# via weasyprint
pygments==2.14.0
pygments==2.15.0
# via ipython
pyopenssl==23.0.0
pyopenssl==23.1.1
# via josepy
pyparsing==2.4.7
pyparsing==3.0.9
# via httplib2
pypdf2==3.0.1
# via -r base.in
pyphen==0.13.2
pyphen==0.14.0
# via weasyprint
python-dateutil==2.8.2
# via
# arrow
# icalendar
# icalevnt
# icalevents
# ics
pytz==2021.3
# via
......@@ -210,22 +219,25 @@ pytz==2021.3
# django-modelcluster
# djangorestframework
# icalendar
# icalevnt
# icalevents
# l18n
pyyaml==6.0
# via -r base.in
redis==4.5.1
redis==4.5.4
# via django-redis
requests==2.28.2
# via
# -r base.in
# mozilla-django-oidc
# requests-cache
# requests-oauthlib
# tweepy
# wagtail
requests-cache==1.0.1
# via -r base.in
requests-oauthlib==1.3.1
# via tweepy
sentry-sdk==1.15.0
sentry-sdk==1.19.1
# via -r base.in
six==1.16.0
# via
......@@ -236,7 +248,8 @@ six==1.16.0
# ics
# l18n
# python-dateutil
soupsieve==2.3.2.post1
# url-normalize
soupsieve==2.4
# via beautifulsoup4
sqlparse==0.4.3
# via django
......@@ -254,29 +267,32 @@ traitlets==5.9.0
# via
# ipython
# matplotlib-inline
tweepy==4.12.1
tweepy==4.13.0
# via -r base.in
urllib3==1.26.14
url-normalize==1.4.3
# via requests-cache
urllib3==1.26.15
# via
# requests
# requests-cache
# sentry-sdk
vine==5.0.0
# via
# amqp
# celery
# kombu
wagtail==4.2
wagtail==4.2.2
# via
# -r base.in
# wagtail-metadata
# wagtail-trash
wagtail-metadata==4.0.2
wagtail-metadata==4.0.3
# via -r base.in
wagtail-trash==0.3.0
wagtail-trash==1.0.0
# via -r base.in
wcwidth==0.2.6
# via prompt-toolkit
weasyprint==57.2
weasyprint==58.1
# via -r base.in
webencodings==0.5.1
# via
......@@ -284,11 +300,11 @@ webencodings==0.5.1
# cssselect2
# html5lib
# tinycss2
whitenoise==6.3.0
whitenoise==6.4.0
# via -r base.in
willow==1.4.1
# via wagtail
zope-interface==5.5.2
zope-interface==6.0
# via datetime
zopfli==0.2.2
# via fonttools
......
django
django<4.2 # wagtail compatibility
django-debug-toolbar
pytest
pytest-sugar
......
......@@ -6,21 +6,19 @@
#
asgiref==3.6.0
# via django
attrs==22.2.0
# via pytest
coverage[toml]==7.1.0
coverage[toml]==7.2.3
# via pytest-cov
django==4.1.6
django==4.1.8
# via
# -r dev.in
# django-debug-toolbar
django-debug-toolbar==3.8.1
django-debug-toolbar==4.0.0
# via -r dev.in
exceptiongroup==1.1.0
exceptiongroup==1.1.1
# via pytest
factory-boy==3.2.1
# via pytest-factoryboy
faker==16.7.0
faker==18.4.0
# via factory-boy
fastdiff==0.3.0
# via snapshottest
......@@ -30,13 +28,13 @@ inflection==0.5.1
# via pytest-factoryboy
iniconfig==2.0.0
# via pytest
packaging==23.0
packaging==23.1
# via
# pytest
# pytest-sugar
pluggy==1.0.0
# via pytest
pytest==7.2.1
pytest==7.3.0
# via
# -r dev.in
# pytest-cov
......@@ -55,7 +53,7 @@ pytest-freezegun==0.4.2
# via -r dev.in
pytest-mock==3.10.0
# via -r dev.in
pytest-sugar==0.9.6
pytest-sugar==0.9.7
# via -r dev.in
python-dateutil==2.8.2
# via
......@@ -79,7 +77,7 @@ tomli==2.0.1
# via
# coverage
# pytest
typing-extensions==4.4.0
typing-extensions==4.5.0
# via pytest-factoryboy
wasmer==1.1.0
# via fastdiff
......
import datetime
import json
import logging
import re
import typing
import urllib
import requests_cache
from django.core.exceptions import ValidationError
from django.core.files.images import ImageFile
from django.forms.utils import ErrorList
from wagtail import blocks
......@@ -674,6 +678,249 @@ class ChartDataset(blocks.StructBlock):
label = "Zdroj dat"
def get_redmine_projects():
session = requests_cache.CachedSession(
"redmine_cache",
expire_after=datetime.timedelta(hours=1),
)
projects = session.get("https://redmine.pirati.cz/projects.json?limit=10000")
projects.raise_for_status()
projects = projects.json()["projects"]
return [(project["id"], project["name"]) for project in projects]
class ChartRedmineIssueDataset(blocks.StructBlock):
projects = blocks.MultipleChoiceBlock(
label="Projekty", choices=get_redmine_projects
)
is_open = blocks.BooleanBlock(
label="Jen otevřené",
required=False,
)
is_closed = blocks.BooleanBlock(
label="Jen uzavřené",
required=False,
)
created_on_min_date = blocks.DateBlock(label="Min. datum vytvoření", required=True)
created_on_max_date = blocks.DateBlock(label="Max. datum vytvoření", required=True)
updated_on = blocks.CharBlock(
label="Filtr pro datum aktualizace",
max_length=128,
help_text="Např. <=2023-01-01. Více informací na pi2.cz/redmine-api",
required=False,
)
issue_label = blocks.CharBlock(
label="Označení úkolů uvnitř grafu",
max_length=128,
required=True,
)
split_per_project = blocks.BooleanBlock(
label="Rozdělit podle projektu",
required=False,
)
only_grow = blocks.BooleanBlock(
label="Pouze růst nahoru",
required=False,
)
def _get_issues_url(self, value, project_id: typing.Union[None, str, list[str]] = None):
url = "https://redmine.pirati.cz/issues.json"
params = [
("sort", "created_on"),
("limit", "100"),
(
"created_on",
f"><{value['created_on_min_date']}|{value['created_on_max_date']}",
),
]
if isinstance(project_id, str):
params.append(("project_id", project_id))
elif isinstance(project_id, list):
params.append(("project_id", ",".join(project_id)))
is_open = value.get("is_open", False)
is_closed = value.get("is_closed", False)
if is_open and is_closed:
params.append(("status_id", "*"))
elif is_open:
params.append(("status_id", "open"))
elif is_closed:
params.append(("status_id", "closed"))
if value.get("updated_on", "") != "":
params.append(("updated_on", value["updated_on"]))
is_first = True
for param_set in params:
param, param_value = param_set
url += "?" if is_first else "&"
url += f"{param}={urllib.parse.quote(param_value)}"
is_first = False
print(url)
return url
def _get_parsed_issues(self, value, labels, issues_url) -> tuple:
session = requests_cache.CachedSession(
"redmine_cache",
expire_after=datetime.timedelta(days=14),
)
issues_response = session.get(issues_url)
issues_response.raise_for_status()
issues_response = issues_response.json()
only_grow = value.get("only_grow", False)
collected_issues = issues_response["issues"]
offset = 0
while issues_response["total_count"] - offset > len(issues_response["issues"]):
offset += 100
issues_response = session.get(f"{issues_url}&offset={offset}")
issues_response.raise_for_status()
issues_response = issues_response.json()
collected_issues += issues_response["issues"]
ending_position = len(collected_issues) - 1
data = None
current_issue_count = 0
current_label = datetime.date.fromisoformat(
collected_issues[0]["created_on"].split("T")[0]
)
if not only_grow:
data = [0] * len(labels)
for position, issue in enumerate(
collected_issues
): # Assume correct sorting order
created_on_date = datetime.date.fromisoformat(
issue["created_on"].split("T")[0]
)
if current_label != created_on_date or position == ending_position:
data[
labels.index(current_label)
] = current_issue_count # Assume labels are unique
current_label = created_on_date
if position != ending_position:
current_issue_count = 0
else:
data[labels.index(current_label)] = 1
break
current_issue_count += 1
else:
data = []
issue_count_by_date = {}
for position, issue in enumerate(
collected_issues
): # Assume correct sorting order
created_on_date = datetime.date.fromisoformat(
issue["created_on"].split("T")[0]
)
if current_label not in issue_count_by_date:
issue_count_by_date[current_label] = 0
if current_label != created_on_date or position == ending_position:
issue_count_by_date[
current_label
] = current_issue_count # Assume labels are unique
current_label = created_on_date
if position == ending_position:
issue_count_by_date[current_label] = current_issue_count + 1
break
current_issue_count += 1
previous_date = None
for date in labels:
if date not in issue_count_by_date:
if previous_date is None:
data.append(0)
continue
data.append(issue_count_by_date[previous_date])
continue
data.append(issue_count_by_date[date])
previous_date = date
return data
def get_context(self, value) -> list:
context = super().get_context(value)
labels = []
datasets = []
for day_count in range(
(value["created_on_max_date"] - value["created_on_min_date"]).days + 1
):
day = value["created_on_min_date"] + datetime.timedelta(days=day_count)
labels.append(day)
if value.get("split_per_project", False):
project_choices_lookup = dict(get_redmine_projects())
for project_id in value["projects"]:
issues_url = self._get_issues_url(value, project_id)
datasets.append(
{
"label": f"{value['issue_label']} - {project_choices_lookup[int(project_id)]}",
"data": self._get_parsed_issues(value, labels, issues_url),
}
)
else:
issues_url = self._get_issues_url(value, value["projects"])
datasets.append(
{
"label": value["issue_label"],
"data": self._get_parsed_issues(value, labels, issues_url),
}
)
labels = [date.strftime("%d. %m. %Y") for date in labels]
context["parsed_issue_labels"] = labels
context["parsed_issues"] = datasets
return context
class Meta:
label = "Zdroj dat z Redmine (úkoly vytvořené za den)"
help_text = (
"Po prvním otevření se bude stránka otevírat delší dobu, "
"zatímco se na pozadí načítají data do grafu. Poté bude "
"fungovat běžně."
)
class ChartBlock(blocks.StructBlock):
title = blocks.CharBlock(
label="Název",
......@@ -692,32 +939,98 @@ class ChartBlock(blocks.StructBlock):
],
default="bar",
)
labels = blocks.ListBlock(
hide_points = blocks.BooleanBlock(
label="Schovat body",
required=False,
help_text="Mění vzhled pouze u linových grafů.",
)
local_labels = blocks.ListBlock(
blocks.CharBlock(
max_length=40,
label="Skupina",
),
label="Skupiny",
default=[],
blank=True,
required=False,
collapsed=True,
label="Místně definované skupiny",
)
datasets = blocks.ListBlock(
local_datasets = blocks.ListBlock(
ChartDataset(),
label="Zdroje dat",
default=[],
blank=True,
required=False,
collapsed=True,
label="Místní zdroje dat",
)
redmine_issue_datasets = blocks.ListBlock(
ChartRedmineIssueDataset(label="Redmine úkoly"),
default=[],
blank=True,
required=False,
label="Zdroje dat z Redmine (úkoly)",
help_text=(
"Úkoly, podle doby vytvoření. Pokud definuješ "
"více zdrojů, datumy v nich musí být stejné."
),
)
def clean(self, value):
result = super().clean(value)
redmine_issues_exist = len(value.get("redmine_issue_datasets", [])) != 0
if len(value.get("local_datasets", [])) != 0 and redmine_issues_exist:
raise ValidationError(
"Definuj pouze jeden typ zdroje dat - místní, nebo z Redmine."
)
if redmine_issues_exist:
min_date = value["redmine_issue_datasets"][0]["created_on_min_date"]
max_date = value["redmine_issue_datasets"][0]["created_on_max_date"]
if len(value["redmine_issue_datasets"]) > 1:
for dataset in value["redmine_issue_datasets"]:
if (
dataset["created_on_min_date"] != min_date
or dataset["created_on_max_date"] != max_date
):
raise ValidationError(
"Maximální a minimální data všech zdrojů z Redmine musí být stejné"
)
return result
def get_context(self, value, parent_context=None):
context = super().get_context(value, parent_context=parent_context)
datasets = []
labels = []
for dataset in value["datasets"]:
dataset = dict(dataset)
if len(value["local_datasets"]) != 0:
labels = value["local_labels"]
datasets.append(
{"label": dataset["label"], "data": [item for item in dataset["data"]]}
)
for dataset in value["local_datasets"]:
datasets.append(
{
"label": dataset["label"],
"data": [item for item in dataset["data"]],
}
)
elif len(value["redmine_issue_datasets"]) != 0:
for dataset_wrapper in value["redmine_issue_datasets"]:
redmine_context = ChartRedmineIssueDataset().get_context(
dataset_wrapper
)
labels = redmine_context["parsed_issue_labels"]
datasets += redmine_context["parsed_issues"]
value["datasets"] = json.dumps(datasets)
value["labels"] = json.dumps([label for label in value["labels"]])
value["labels"] = json.dumps([label for label in labels])
return context
......
......@@ -13,7 +13,9 @@ class OverwriteStorage(get_storage_class()):
Found at https://djangosnippets.org/snippets/976/
"""
# If the filename already exists, remove it as if it was a true file system
if self.exists(name):
os.remove(os.path.join(settings.MEDIA_ROOT, name))
return name
return name
......@@ -69,7 +69,7 @@
{{ article.title }}
</h1>
</a>
<p class="card-body-text flex-grow{% if article.is_black %} bg-black{% endif %}">
<p class="card-body-text text-ellipsis overflow-hidden h-64 flex-grow{% if article.is_black %} bg-black{% endif %}">
{{ article.perex }}
</p>
<div class="inline-block-nogap mt-4">
......
......@@ -50,6 +50,8 @@
tempDataset["borderColor"] = getColor();
tempDataset["borderWidth"] = 1;
tempDataset["fill"] = true;
tempDataset["tension"] = 0.3;
finalDatasets.push(tempDataset);
}
......@@ -83,7 +85,13 @@
beginAtZero: true,
},
},
},
}{% if value.hide_points %},
elements: {
point: {
radius: 0
}
}
{% endif %}
}
}
);
......
......@@ -3,7 +3,7 @@ from datetime import datetime
from pathlib import Path
import pytest
from icalevnt.icalparser import Event
from icalevents.icalparser import Event
@pytest.fixture(scope="session")
......
from datetime import datetime
from zoneinfo import ZoneInfo
import arrow
import pytest
from icalevnt.icalparser import Event
from icalevents.icalparser import Event
from calendar_utils.parser import (
process_event_list,
......@@ -20,7 +18,7 @@ def test_split_events(sample_response, sample_future_events, sample_past_events)
@pytest.mark.freeze_time("2022-05-13")
def test_split_events(sample_events, sample_future_events, sample_past_events):
def test_split_dist_list(sample_events, sample_future_events, sample_past_events):
past_events, future_events = split_event_dict_list(sample_events)
assert sample_past_events == past_events
assert sample_future_events == future_events
......
from django.conf import settings
from django.core.management.base import BaseCommand
from ...services import TweetDownloadService
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
"--days_back",
default=1,
required=False,
type=int,
help="Stáří tweetů ve dnech",
)
def handle(self, *args, **options):
tds = TweetDownloadService(
bearer_token=settings.TWITTER_BEARER_TOKEN, days_back=options["days_back"]
)
tds.perform_update()
self.stdout.write("\nUpdate of tweets finished!")
# Generated by Django 4.0.7 on 2022-08-19 08:35
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Tweet",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"author_img_url",
models.URLField(
default="https://pbs.twimg.com/profile_images/1556544269443387394/jSO2A2Fr_200x200.jpg"
),
),
("author_name", models.CharField(default="Piráti", max_length=128)),
(
"author_username",
models.CharField(default="PiratskaStrana", max_length=128),
),
("text", models.TextField()),
("twitter_id", models.CharField(max_length=32, unique=True)),
],
),
]
......@@ -2,7 +2,7 @@
from django.db import migrations, models
import twitter_utils.storages
import shared.storages
class Migration(migrations.Migration):
......@@ -20,7 +20,7 @@ class Migration(migrations.Migration):
name="author_img",
field=models.ImageField(
null=True,
storage=twitter_utils.storages.OverwriteStorage,
storage=shared.storages.OverwriteStorage,
upload_to="twitter_accounts",
),
),
......
......@@ -2,7 +2,7 @@
from django.db import migrations, models
import twitter_utils.storages
import shared.storages
class Migration(migrations.Migration):
......@@ -15,7 +15,7 @@ class Migration(migrations.Migration):
model_name="tweet",
name="author_img",
field=models.ImageField(
storage=twitter_utils.storages.OverwriteStorage,
storage=shared.storages.OverwriteStorage,
upload_to="twitter_accounts",
),
),
......
from django.db import models
from twitter_utils.storages import OverwriteStorage
from shared.storages import OverwriteStorage
class TweetQueryset(models.QuerySet):
......
import logging
import os
from datetime import timedelta
from typing import TYPE_CHECKING
from urllib import request
from django.core.files import File
from django.utils import timezone
from tweepy import Client
from tweepy.errors import BadRequest
from main.models import MainHomePage, MainPersonPage
from .models import Tweet
if TYPE_CHECKING:
from tweepy import Media
from tweepy import Tweet as TweetResponse
from tweepy import User
logger = logging.getLogger()
class TweetDownloadService:
"""
Service class starající se o update tweetů z Twitter API, v současné chvíli
bere tweety z účtu nastavených v (první) MainHomePage stránce (HP pirati.cz).
"""
client: Client
days_back: int
def __init__(self, bearer_token, days_back=1):
if not bearer_token:
raise RuntimeError("Twitter bearer token not set, cannot update tweets")
self.client = Client(bearer_token=bearer_token)
self.days_back = days_back
@staticmethod
def download_remote_image(image_url) -> (str, File):
try:
response = request.urlretrieve(image_url)
except Exception as exc:
logger.warning(exc)
return "", None
return os.path.basename(image_url), File(open(response[0], "rb"))
@staticmethod
def get_existing_tweet_id_list() -> list[int]:
"""
Vrací IDs už uložených Tweetů - možná by stálo za to brát jen z určitého
časového období...
"""
return Tweet.objects.values_list("twitter_id", flat=True)
@staticmethod
def get_tweet_media_url(media_key, media_list):
return next(m.url for m in media_list if m.media_key == media_key)
def get_tweets_response(self, user_id) -> (list["TweetResponse"], list["Media"]):
"""
Vrací list tweetů (objektů) pro daného Twitter uživatele.
"""
tweets_response = self.client.get_users_tweets(
user_id,
exclude=["retweets"],
expansions=[
"author_id",
"attachments.media_keys",
"entities.mentions.username",
],
max_results=100,
media_fields=["url"], # TODO use this? download need probably
start_time=timezone.now() - timedelta(days=self.days_back),
tweet_fields=["author_id", "created_at", "in_reply_to_user_id"],
user_fields=["name", "username"],
)
return tweets_response.data or [], tweets_response[1].get("media", [])
def get_user_list_data(self) -> list["User"]:
twitter_usernames_block = MainHomePage.objects.first().twitter_usernames
person_username_list = (
MainPersonPage.objects.filter(twitter_username__isnull=False)
.values_list("twitter_username", flat=True)
.distinct()
)
homepage_username_list = [
username_data["value"] for username_data in twitter_usernames_block.raw_data
]
# kvůli duplicitám udělám list/set/list konverzi
username_list = list({*person_username_list, *homepage_username_list})
user_data_list = []
for username in username_list:
try:
user_data_list.append(self.get_user_response(username))
except BadRequest:
logger.error(
"Cannot download tweets for the username",
extra={"username": username},
)
return user_data_list
def get_user_response(self, username) -> "User":
"""
Vrací informace o daném uživateli.
"""
user_response = self.client.get_user(
username=username,
user_fields=["profile_image_url"], # id, name, username enabled by default
)
return user_response.data
def perform_update(self) -> int:
"""
Obaluje celý proces downloadu Tweetů z API do DB.
"""
existing_tweet_id_list = self.get_existing_tweet_id_list()
user_data_list = self.get_user_list_data()
tweets_to_save = []
for user_data in user_data_list:
tweet_resp_list, media_list = self.get_tweets_response(user_id=user_data.id)
for tweet_response in tweet_resp_list:
if (
# tweet již načten, nebo je odpověď
str(tweet_response.id) in existing_tweet_id_list
or tweet_response.in_reply_to_user_id is not None
):
continue
# vyzobej data z responses
tweet = Tweet(
author_name=user_data.name,
author_username=user_data.username,
text=tweet_response.text.split("https://t.co")[0],
twitter_id=str(tweet_response.id),
)
# ulož obrázek Twitter účtu do media
tweet.author_img.save(
*self.download_remote_image(user_data.profile_image_url),
False, # to prevent model save before bulk create
)
# zkus dohledat obrázek pro Tweet
if tweet_response.attachments:
self.try_find_image_for_tweet(tweet, tweet_response, media_list)
# přidej do seznamu k uložení
tweets_to_save.append(tweet)
return Tweet.objects.bulk_create(tweets_to_save)
def try_find_image_for_tweet(
self, tweet: Tweet, tweet_response: "TweetResponse", media_list: list["Media"]
):
tweet_media_keys = tweet_response.attachments.get("media_keys", [])
if tweet_media_keys:
img_url = self.get_tweet_media_url(tweet_media_keys[0], media_list)
if img_url: # ne vždycky je obrázek v media_listu...
tweet.image.save(
*self.download_remote_image(image_url=img_url),
False, # to prevent model save before bulk create
)
from wagtail.blocks import (
CharBlock,
ListBlock,
PageChooserBlock,
StructBlock,
URLBlock,
)
from wagtail.blocks import CharBlock, ListBlock, PageChooserBlock, StructBlock, URLBlock
class PersonUrlBlock(StructBlock):
......@@ -39,4 +33,4 @@ class PeopleGroupListBlock(StructBlock):
class Meta:
template = "uniweb/blocks/people_group_block.html"
icon = "list-ul"
label = "Skupina členů"
label = "Skupina členů"