Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions api/base/settings/defaults.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,13 +325,16 @@
},
'osfmetrics_es8': {
'elasticsearch_metrics.imps.elastic8': {
# passthru kwargs to elasticsearch8 connection constructor
'hosts': osf_settings.ELASTIC8_URI,
'ca_certs': osf_settings.ELASTIC8_CERT_PATH,
'basic_auth': (
(osf_settings.ELASTIC8_USERNAME, osf_settings.ELASTIC8_SECRET)
if osf_settings.ELASTIC8_SECRET is not None
else None
),
# djelme-specific kwargs
'djelme_default_index_name_prefix': osf_settings.SHARE_PROVIDER_PREPEND,
},
},
}
Expand Down
2 changes: 2 additions & 0 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@ def pytest_configure(config):
'transitions.core',
'MARKDOWN',
'elasticsearch',
'elastic_transport',
'elasticsearch_metrics',
]
for logger_name in SILENT_LOGGERS:
logging.getLogger(logger_name).setLevel(logging.CRITICAL)
Expand Down
12 changes: 9 additions & 3 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,17 @@ services:
# Temporary: Remove when we've upgraded to ES6
elasticsearch6:
image: docker.elastic.co/elasticsearch/elasticsearch:6.3.1
environment:
- ES_JAVA_OPTS=-Xms512m -Xmx512m # reduce memory usage
ports:
- 9201:9200
volumes:
- elasticsearch6_data_vol:/usr/share/elasticsearch/data
healthcheck:
start_period: 15s
test: curl -s http://localhost:9200/_cluster/health | grep -vq '"status":"red"'
interval: 10s
retries: 30
stdin_open: true

elasticsearch8:
Expand All @@ -91,10 +98,9 @@ services:
- elasticsearch8_data_vol:/usr/share/elasticsearch/data
healthcheck:
start_period: 15s
test: ["CMD", "curl", "-sf", "http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=30s"]
test: curl -s http://localhost:9200/_cluster/health | grep -vq '"status":"red"'
interval: 10s
timeout: 30s
retries: 5
retries: 30
stdin_open: true

postgres:
Expand Down
2 changes: 2 additions & 0 deletions framework/celery_tasks/routers.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ def match_by_module(task_path):
return CeleryConfig.task_med_queue
if task_subpath in CeleryConfig.high_pri_modules:
return CeleryConfig.task_high_queue
if task_subpath in CeleryConfig.background_migration_modules:
return CeleryConfig.task_background_migration_queue
if task_subpath in CeleryConfig.remote_computing_modules:
return CeleryConfig.task_remote_computing_queue
if task_subpath in CeleryConfig.account_status_changes_modules:
Expand Down
19 changes: 19 additions & 0 deletions osf/management/commands/fake_metrics_reports.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
UserSummaryReport,
PreprintSummaryReport,
)
from osf.metrics.reports import PublicItemUsageReport
from osf.metrics.utils import YearMonth
from osf.models import PreprintProvider


Expand Down Expand Up @@ -53,10 +55,27 @@ def fake_preprint_counts(days_back):
).save()


def fake_usage_reports(osfid: str, count: int):
    """Store `count` months of randomized usage metrics for one item.

    Reports start at the month before the current one and walk backward
    one month per iteration.

    :param osfid: the osf id of the item the fake reports describe
    :param count: how many consecutive monthly reports to create
    """
    _yearmonth = YearMonth.from_date(date.today()).prior()
    for _ in range(count):
        # session counts are bounded by their corresponding totals
        _views = randint(0, 500)
        _view_sessions = randint(0, _views)
        _downloads = randint(0, 300)
        _download_sessions = randint(0, _downloads)
        PublicItemUsageReport.record(
            item_osfid=osfid,
            report_yearmonth=_yearmonth,
            view_count=_views,
            view_session_count=_view_sessions,
            download_count=_downloads,
            download_session_count=_download_sessions,
        )
        _yearmonth = _yearmonth.prior()


class Command(BaseCommand):
    """Management command that fills metrics indexes with fake report data."""

    def handle(self, *args, **kwargs):
        # guard: fake data must never be generated against a real deployment
        if not settings.DEBUG:
            raise NotImplementedError('fake_reports requires DEBUG mode')
        fake_user_counts(1000)
        fake_preprint_counts(1000)
        # (item osfid, number of monthly usage reports) pairs
        for _osfid, _month_count in (('blarg', 100), ('blerg', 50), ('bleg', 50)):
            fake_usage_reports(_osfid, _month_count)
        # TODO: more reports
Loading
Loading