Merge pull request #556 from Exterminator11/openapi3_parser

Parser for OpenAPI 3 (Swagger)
Alex committed 8 months ago via GitHub
commit f3c626c800

@@ -0,0 +1,51 @@
from urllib.parse import urlparse

from openapi_parser import parse

try:
    from application.parser.file.base_parser import BaseParser
except ModuleNotFoundError:
    from base_parser import BaseParser


class OpenAPI3Parser(BaseParser):
    def init_parser(self) -> None:
        return super().init_parser()

    def get_base_urls(self, urls):
        """Collect the unique scheme://netloc prefixes of the given URLs."""
        base_urls = []
        for url in urls:
            parsed_url = urlparse(url)
            base_url = parsed_url.scheme + "://" + parsed_url.netloc
            if base_url not in base_urls:
                base_urls.append(base_url)
        return base_urls

    def get_info_from_paths(self, path):
        """Summarize each operation on a path as method=<first response description>."""
        info = ""
        if path.operations:
            for operation in path.operations:
                info += (
                    f"\n{operation.method.value}="
                    f"{operation.responses[0].description}"
                )
        return info

    def parse_file(self, file_path):
        data = parse(file_path)
        results = ""
        # Deduplicate the spec's server URLs down to their base URLs.
        base_urls = self.get_base_urls(link.url for link in data.servers)
        results += f"Base URL:{','.join(base_urls)}\n"
        # Describe every path: URL, description, parameters and operations.
        for i, path in enumerate(data.paths, start=1):
            info = self.get_info_from_paths(path)
            results += (
                f"Path{i}: {path.url}\n"
                f"description: {path.description}\n"
                f"parameters: {path.parameters}\nmethods: {info}\n"
            )
        with open("results.txt", "w") as f:
            f.write(results)
        return results
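
For context, a minimal usage sketch (not part of the diff), assuming the tests/test_openapi3.yaml fixture added later in this PR is present and the application package is importable:

# Sketch only: exercise the new parser against the PR's fixture.
from application.parser.file.openapi3_parser import OpenAPI3Parser

parser = OpenAPI3Parser()
summary = parser.parse_file("tests/test_openapi3.yaml")  # also writes results.txt
print(summary)
# Base URL:http://petstore.swagger.io,https://api.example.com
# Path1: /pets
# ...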

@@ -57,6 +57,7 @@ nltk==3.8.1
numcodecs==0.11.0
numpy==1.24.2
openai==0.27.8
openapi3-parser==1.1.14
packaging==23.0
pathos==0.3.0
Pillow==10.0.1

@@ -0,0 +1,51 @@
from urllib.parse import urlparse

from openapi_parser import parse

try:
    from scripts.parser.file.base_parser import BaseParser
except ModuleNotFoundError:
    from base_parser import BaseParser


class OpenAPI3Parser(BaseParser):
    def init_parser(self) -> None:
        return super().init_parser()

    def get_base_urls(self, urls):
        """Collect the unique scheme://netloc prefixes of the given URLs."""
        base_urls = []
        for url in urls:
            parsed_url = urlparse(url)
            base_url = parsed_url.scheme + "://" + parsed_url.netloc
            if base_url not in base_urls:
                base_urls.append(base_url)
        return base_urls

    def get_info_from_paths(self, path):
        """Summarize each operation on a path as method=<first response description>."""
        info = ""
        if path.operations:
            for operation in path.operations:
                info += (
                    f"\n{operation.method.value}="
                    f"{operation.responses[0].description}"
                )
        return info

    def parse_file(self, file_path):
        data = parse(file_path)
        results = ""
        # Deduplicate the spec's server URLs down to their base URLs.
        base_urls = self.get_base_urls(link.url for link in data.servers)
        results += f"Base URL:{','.join(base_urls)}\n"
        # Describe every path: URL, description, parameters and operations.
        for i, path in enumerate(data.paths, start=1):
            info = self.get_info_from_paths(path)
            results += (
                f"Path{i}: {path.url}\n"
                f"description: {path.description}\n"
                f"parameters: {path.parameters}\nmethods: {info}\n"
            )
        with open("results.txt", "w") as f:
            f.write(results)
        return results

@@ -61,6 +61,7 @@ nltk==3.8.1
numcodecs==0.11.0
numpy==1.25.2
openai==0.27.8
openapi3-parser==1.1.14
openpyxl==3.1.2
packaging==23.1
pandas==2.0.3

@@ -0,0 +1,116 @@
openapi: "3.0.0"
info:
version: 1.0.0
title: Swagger Petstore
license:
name: MIT
servers:
- url: http://petstore.swagger.io/v1
- url: https://api.example.com/v1/resource
- url: https://api.example.com/v1/another/resource
- url: https://api.example.com/v1/some/endpoint
paths:
/pets:
get:
summary: List all pets
operationId: listPets
tags:
- pets
parameters:
- name: limit
in: query
description: How many items to return at one time (max 100)
required: false
schema:
type: integer
maximum: 100
format: int32
responses:
'200':
description: A paged array of pets
headers:
x-next:
description: A link to the next page of responses
schema:
type: string
content:
application/json:
schema:
$ref: "#/components/schemas/Pets"
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
post:
summary: Create a pet
operationId: createPets
tags:
- pets
responses:
'201':
description: Null response
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
/pets/{petId}:
get:
summary: Info for a specific pet
operationId: showPetById
tags:
- pets
parameters:
- name: petId
in: path
required: true
description: The id of the pet to retrieve
schema:
type: string
responses:
'200':
description: Expected response to a valid request
content:
application/json:
schema:
$ref: "#/components/schemas/Pet"
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
components:
schemas:
Pet:
type: object
required:
- id
- name
properties:
id:
type: integer
format: int64
name:
type: string
tag:
type: string
Pets:
type: array
maxItems: 100
items:
$ref: "#/components/schemas/Pet"
Error:
type: object
required:
- code
- message
properties:
code:
type: integer
format: int32
message:
type: string
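
As a quick sanity check (a sketch, not part of the diff), this is roughly what the openapi3-parser objects the new parser walks over look like for the fixture above:

from openapi_parser import parse

# Parse the fixture with the same library the parser uses internally.
spec = parse("tests/test_openapi3.yaml")

# Server URLs feed get_base_urls(); path objects feed get_info_from_paths().
print([server.url for server in spec.servers])
# ['http://petstore.swagger.io/v1', 'https://api.example.com/v1/resource',
#  'https://api.example.com/v1/another/resource',
#  'https://api.example.com/v1/some/endpoint']
print([path.url for path in spec.paths])
# ['/pets', '/pets/{petId}']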

@@ -0,0 +1,50 @@
import pytest
from openapi_parser import parse

from application.parser.file.openapi3_parser import OpenAPI3Parser


@pytest.mark.parametrize(
    "urls, expected_base_urls",
    [
        (
            [
                "http://petstore.swagger.io/v1",
                "https://api.example.com/v1/resource",
                "https://api.example.com/v1/another/resource",
                "https://api.example.com/v1/some/endpoint",
            ],
            ["http://petstore.swagger.io", "https://api.example.com"],
        ),
    ],
)
def test_get_base_urls(urls, expected_base_urls):
    assert OpenAPI3Parser().get_base_urls(urls) == expected_base_urls


def test_get_info_from_paths():
    file_path = "tests/test_openapi3.yaml"
    data = parse(file_path)
    path = data.paths[1]  # /pets/{petId}
    assert (
        OpenAPI3Parser().get_info_from_paths(path)
        == "\nget=Expected response to a valid request"
    )


def test_parse_file():
    file_path = "tests/test_openapi3.yaml"
    results_expected = (
        "Base URL:http://petstore.swagger.io,https://api.example.com\nPath1: "
        + "/pets\ndescription: None\nparameters: []\nmethods: \n"
        + "get=A paged array of pets\npost=Null "
        + "response\nPath2: /pets/{petId}\ndescription: None\n"
        + "parameters: []\nmethods: "
        + "\nget=Expected response to a valid request\n"
    )
    openapi_parser_test = OpenAPI3Parser()
    results = openapi_parser_test.parse_file(file_path)
    assert results == results_expected


if __name__ == "__main__":
    pytest.main()
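
Note: both file-based tests load tests/test_openapi3.yaml via a path relative to the repository root, so they are meant to be run from there; test_parse_file will also leave a results.txt behind, since parse_file writes its summary to disk as a side effect.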