
Commit 2179bea
chore(refactor): Move JsonResource function into caller
jpmckinney committed Jul 19, 2024
1 parent 6a21c9a commit 2179bea
Showing 2 changed files with 20 additions and 21 deletions.
scrapyd/utils.py: 0 additions & 19 deletions
@@ -1,35 +1,16 @@
-import json
 import os
 import sys
 from subprocess import PIPE, Popen
 from typing import ClassVar
 from urllib.parse import urlsplit
 
 from scrapy.utils.misc import load_object
-from twisted.web import resource
 
 from scrapyd.config import Config
 from scrapyd.exceptions import RunnerError
 from scrapyd.sqlite import JsonSqliteDict
 
 
-class JsonResource(resource.Resource):
-    json_encoder = json.JSONEncoder()
-
-    def render(self, txrequest):
-        r = resource.Resource.render(self, txrequest)
-        return self.encode_object(r, txrequest)
-
-    def encode_object(self, obj, txrequest):
-        r = "" if obj is None else self.json_encoder.encode(obj) + "\n"
-        txrequest.setHeader("Content-Type", "application/json")
-        txrequest.setHeader("Access-Control-Allow-Origin", "*")
-        txrequest.setHeader("Access-Control-Allow-Methods", "GET, POST, PATCH, PUT, DELETE")
-        txrequest.setHeader("Access-Control-Allow-Headers", " X-Requested-With")
-        txrequest.setHeader("Content-Length", str(len(r)))
-        return r
-
-
 class UtilsCache:
     # array of project name that need to be invalided
     invalid_cached_projects: ClassVar = []
scrapyd/webservice.py: 20 additions & 2 deletions
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import functools
+import json
 import sys
 import traceback
 import uuid
@@ -9,11 +10,11 @@
 from io import BytesIO
 
 from twisted.python import log
-from twisted.web import error, http
+from twisted.web import error, http, resource
 
 from scrapyd.exceptions import EggNotFoundError, ProjectNotFoundError
 from scrapyd.jobstorage import job_items_url, job_log_url
-from scrapyd.utils import JsonResource, UtilsCache, get_spider_list, native_stringify_dict
+from scrapyd.utils import UtilsCache, get_spider_list, native_stringify_dict
 
 
 def param(
@@ -52,6 +53,23 @@ def wrapper(self, txrequest, *args, **kwargs):
     return decorator
 
 
+class JsonResource(resource.Resource):
+    json_encoder = json.JSONEncoder()
+
+    def render(self, txrequest):
+        r = resource.Resource.render(self, txrequest)
+        return self.encode_object(r, txrequest)
+
+    def encode_object(self, obj, txrequest):
+        r = "" if obj is None else self.json_encoder.encode(obj) + "\n"
+        txrequest.setHeader("Content-Type", "application/json")
+        txrequest.setHeader("Access-Control-Allow-Origin", "*")
+        txrequest.setHeader("Access-Control-Allow-Methods", "GET, POST, PATCH, PUT, DELETE")
+        txrequest.setHeader("Access-Control-Allow-Headers", " X-Requested-With")
+        txrequest.setHeader("Content-Length", str(len(r)))
+        return r
+
+
 class WsResource(JsonResource):
     def __init__(self, root):
         JsonResource.__init__(self)
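
For reference, a minimal sketch (not part of the commit) of how the relocated JsonResource behaves: encode_object() serializes whatever a render_* method returns using json.JSONEncoder, appends a newline, and sets the JSON and CORS headers on the request. The FakeRequest stub below is hypothetical and exists only to record those headers; the import assumes a scrapyd checkout that includes this commit.

from scrapyd.webservice import JsonResource  # new home of the class after this commit


class FakeRequest:
    """Hypothetical stand-in for a Twisted request; records headers set by encode_object()."""

    def __init__(self):
        self.headers = {}

    def setHeader(self, name, value):
        self.headers[name] = value


request = FakeRequest()
body = JsonResource().encode_object({"status": "ok", "pending": []}, request)

print(body)             # prints {"status": "ok", "pending": []} followed by the appended newline
print(request.headers)  # includes Content-Type: application/json plus the Access-Control-* headers

In the tree after this commit, WsResource subclasses JsonResource directly (as shown above), so the webservice endpoints keep the same JSON encoding without importing it from scrapyd.utils.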
