Commit 55a758f
Consolidate ops class initialization (#2117)
Fixes #2111. The background job and operator entrypoints now use a shared function that initializes and returns the ops classes. This is not applied to the main entrypoint, as that entrypoint also initializes the backend API, which we don't want in the other entrypoints.
1 parent 0dc025e commit 55a758f
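
For reference, a minimal sketch of the consolidated pattern this commit introduces (assuming the package is imported as btrixcloud and that init_db() can reach a configured MongoDB instance; the real entrypoints appear in the diffs below):

# Hypothetical caller shown for illustration only; main_op.py and main_bg.py
# below are the actual entrypoints that call init_ops().
from btrixcloud.ops import init_ops

(
    org_ops,
    crawl_config_ops,
    base_crawl_ops,
    crawl_ops,
    page_ops,
    coll_ops,
    profile_ops,
    storage_ops,
    background_job_ops,
    event_webhook_ops,
    user_manager,
) = init_ops()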

File tree: 3 files changed (+137, −154 lines)

backend/btrixcloud/main_bg.py (+3, −90)

@@ -6,25 +6,9 @@
 import traceback
 from uuid import UUID

-from .crawlmanager import CrawlManager
-from .db import init_db
-from .emailsender import EmailSender
-
-# from .utils import register_exit_handler
 from .models import BgJobType
+from .ops import init_ops

-from .basecrawls import BaseCrawlOps
-from .invites import InviteOps
-from .users import init_user_manager
-from .orgs import OrgOps
-from .colls import CollectionOps
-from .crawlconfigs import CrawlConfigOps
-from .crawls import CrawlOps
-from .profiles import ProfileOps
-from .storages import StorageOps
-from .webhooks import EventWebhookOps
-from .background_jobs import BackgroundJobOps
-from .pages import PageOps

 job_type = os.environ.get("BG_JOB_TYPE")
 oid = os.environ.get("OID")
@@ -33,19 +17,7 @@
 # ============================================================================
 # pylint: disable=too-many-function-args, duplicate-code, too-many-locals
 async def main():
-    """main init"""
-    email = EmailSender()
-    crawl_manager = None
-
-    dbclient, mdb = init_db()
-
-    invite_ops = InviteOps(mdb, email)
-
-    user_manager = init_user_manager(mdb, email, invite_ops)
-
-    org_ops = OrgOps(mdb, invite_ops, user_manager)
-
-    event_webhook_ops = EventWebhookOps(mdb, org_ops)
+    """run background job with access to ops classes"""

     # pylint: disable=import-outside-toplevel
     if not os.environ.get("KUBERNETES_SERVICE_HOST"):
@@ -55,66 +27,7 @@ async def main():
         )
         sys.exit(1)

-    crawl_manager = CrawlManager()
-
-    storage_ops = StorageOps(org_ops, crawl_manager)
-
-    background_job_ops = BackgroundJobOps(
-        mdb, email, user_manager, org_ops, crawl_manager, storage_ops
-    )
-
-    profile_ops = ProfileOps(
-        mdb, org_ops, crawl_manager, storage_ops, background_job_ops
-    )
-
-    crawl_config_ops = CrawlConfigOps(
-        dbclient,
-        mdb,
-        user_manager,
-        org_ops,
-        crawl_manager,
-        profile_ops,
-    )
-
-    coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)
-
-    base_crawl_ops = BaseCrawlOps(
-        mdb,
-        user_manager,
-        org_ops,
-        crawl_config_ops,
-        coll_ops,
-        storage_ops,
-        event_webhook_ops,
-        background_job_ops,
-    )
-
-    crawl_ops = CrawlOps(
-        crawl_manager,
-        mdb,
-        user_manager,
-        org_ops,
-        crawl_config_ops,
-        coll_ops,
-        storage_ops,
-        event_webhook_ops,
-        background_job_ops,
-    )
-
-    page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops)
-
-    base_crawl_ops.set_page_ops(page_ops)
-    crawl_ops.set_page_ops(page_ops)
-
-    background_job_ops.set_ops(crawl_ops, profile_ops)
-
-    org_ops.set_ops(base_crawl_ops, profile_ops, coll_ops, background_job_ops)
-
-    user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops)
-
-    background_job_ops.set_ops(base_crawl_ops, profile_ops)
-
-    crawl_config_ops.set_coll_ops(coll_ops)
+    (org_ops, _, _, _, _, _, _, _, _, _, user_manager) = init_ops()

     # Run job
     if job_type == BgJobType.DELETE_ORG:

backend/btrixcloud/main_op.py (+10, −64)

@@ -5,43 +5,18 @@

 from fastapi import FastAPI

-from .crawlmanager import CrawlManager
-from .db import init_db
-from .emailsender import EmailSender
 from .operator import init_operator_api
+from .ops import init_ops
 from .utils import register_exit_handler

-from .invites import InviteOps
-from .users import init_user_manager
-from .orgs import OrgOps
-from .colls import CollectionOps
-from .crawlconfigs import CrawlConfigOps
-from .crawls import CrawlOps
-from .profiles import ProfileOps
-from .storages import init_storages_api
-from .webhooks import EventWebhookOps
-from .background_jobs import BackgroundJobOps
-from .pages import PageOps

 app_root = FastAPI()


 # ============================================================================
 # pylint: disable=too-many-function-args, duplicate-code
 def main():
-    """main init"""
-    email = EmailSender()
-    crawl_manager = None
-
-    dbclient, mdb = init_db()
-
-    invite_ops = InviteOps(mdb, email)
-
-    user_manager = init_user_manager(mdb, email, invite_ops)
-
-    org_ops = OrgOps(mdb, invite_ops, user_manager)
-
-    event_webhook_ops = EventWebhookOps(mdb, org_ops)
+    """init operator"""

     # pylint: disable=import-outside-toplevel
     if not os.environ.get("KUBERNETES_SERVICE_HOST"):
@@ -51,48 +26,19 @@ def main():
         )
         sys.exit(1)

-    crawl_manager = CrawlManager()
-
-    storage_ops = init_storages_api(org_ops, crawl_manager)
-
-    background_job_ops = BackgroundJobOps(
-        mdb, email, user_manager, org_ops, crawl_manager, storage_ops
-    )
-
-    profile_ops = ProfileOps(
-        mdb, org_ops, crawl_manager, storage_ops, background_job_ops
-    )
-
-    crawl_config_ops = CrawlConfigOps(
-        dbclient,
-        mdb,
-        user_manager,
-        org_ops,
-        crawl_manager,
-        profile_ops,
-    )
-
-    user_manager.set_ops(org_ops, crawl_config_ops, None)
-
-    coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)
-
-    crawl_ops = CrawlOps(
-        crawl_manager,
-        mdb,
-        user_manager,
+    (
         org_ops,
         crawl_config_ops,
+        _,
+        crawl_ops,
+        page_ops,
         coll_ops,
+        _,
         storage_ops,
-        event_webhook_ops,
         background_job_ops,
-    )
-
-    page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops)
-
-    crawl_ops.set_page_ops(page_ops)
-
-    background_job_ops.set_ops(crawl_ops, profile_ops)
+        event_webhook_ops,
+        _,
+    ) = init_ops()

     return init_operator_api(
         app_root,

backend/btrixcloud/ops.py (new file, +124)

@@ -0,0 +1,124 @@
+""" shared helper to initialize ops classes """
+
+from typing import Tuple
+
+from .crawlmanager import CrawlManager
+from .db import init_db
+from .emailsender import EmailSender
+
+from .background_jobs import BackgroundJobOps
+from .basecrawls import BaseCrawlOps
+from .colls import CollectionOps
+from .crawls import CrawlOps
+from .crawlconfigs import CrawlConfigOps
+from .invites import InviteOps
+from .orgs import OrgOps
+from .pages import PageOps
+from .profiles import ProfileOps
+from .storages import StorageOps
+from .users import UserManager
+from .webhooks import EventWebhookOps
+
+
+# pylint: disable=too-many-locals
+def init_ops() -> Tuple[
+    OrgOps,
+    CrawlConfigOps,
+    BaseCrawlOps,
+    CrawlOps,
+    PageOps,
+    CollectionOps,
+    ProfileOps,
+    StorageOps,
+    BackgroundJobOps,
+    EventWebhookOps,
+    UserManager,
+]:
+    """Initialize and return ops classes"""
+    email = EmailSender()
+
+    dbclient, mdb = init_db()
+
+    invite_ops = InviteOps(mdb, email)
+
+    user_manager = UserManager(mdb, email, invite_ops)
+
+    org_ops = OrgOps(mdb, invite_ops, user_manager)
+
+    event_webhook_ops = EventWebhookOps(mdb, org_ops)
+
+    crawl_manager = CrawlManager()
+
+    storage_ops = StorageOps(org_ops, crawl_manager)
+
+    background_job_ops = BackgroundJobOps(
+        mdb, email, user_manager, org_ops, crawl_manager, storage_ops
+    )
+
+    profile_ops = ProfileOps(
+        mdb, org_ops, crawl_manager, storage_ops, background_job_ops
+    )
+
+    crawl_config_ops = CrawlConfigOps(
+        dbclient,
+        mdb,
+        user_manager,
+        org_ops,
+        crawl_manager,
+        profile_ops,
+    )
+
+    coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)
+
+    base_crawl_ops = BaseCrawlOps(
+        mdb,
+        user_manager,
+        org_ops,
+        crawl_config_ops,
+        coll_ops,
+        storage_ops,
+        event_webhook_ops,
+        background_job_ops,
+    )
+
+    crawl_ops = CrawlOps(
+        crawl_manager,
+        mdb,
+        user_manager,
+        org_ops,
+        crawl_config_ops,
+        coll_ops,
+        storage_ops,
+        event_webhook_ops,
+        background_job_ops,
+    )
+
+    page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops)
+
+    base_crawl_ops.set_page_ops(page_ops)
+
+    crawl_ops.set_page_ops(page_ops)
+
+    background_job_ops.set_ops(crawl_ops, profile_ops)
+
+    org_ops.set_ops(base_crawl_ops, profile_ops, coll_ops, background_job_ops)
+
+    user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops)
+
+    background_job_ops.set_ops(base_crawl_ops, profile_ops)
+
+    crawl_config_ops.set_coll_ops(coll_ops)
+
+    return (
+        org_ops,
+        crawl_config_ops,
+        base_crawl_ops,
+        crawl_ops,
+        page_ops,
+        coll_ops,
+        profile_ops,
+        storage_ops,
+        background_job_ops,
+        event_webhook_ops,
+        user_manager,
+    )
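
A note on the design this diff settles on: init_ops() returns a flat 11-tuple, so callers unpack positionally and discard anything they do not need. The main_bg.py hunk above keeps only org_ops and user_manager; a minimal sketch of that style, under the same import-path assumption as the earlier example:

from btrixcloud.ops import init_ops

# Keep only the first and last returned ops classes; the nine in between are
# discarded with placeholders, mirroring the main_bg.py entrypoint above.
(org_ops, _, _, _, _, _, _, _, _, _, user_manager) = init_ops()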
