-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #68 from my-dev-app/scraper/additional-parsers
Scraper/additional parsers
- Loading branch information
Showing
10 changed files
with
356 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
47 changes: 47 additions & 0 deletions
47
aproxyrelay/scrapers/parser_murongpig_proxy_master_http.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,47 @@ | ||
# -*- mode: python ; coding: utf-8 -*- | ||
""" | ||
░░ ░░ ░░ ░░░ ░░ ░░░░ ░ ░░░░ ░ ░░ ░ ░░░░░░░░ ░░ ░░░░ ░ | ||
▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒▒ ▒▒ ▒▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒ | ||
▓ ▓▓▓▓ ▓ ▓▓ ▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓▓▓ ▓▓▓ ▓▓ ▓▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓ | ||
█ █ ███████ ███ ██ ████ ██ ██ █████ ████ ███ ██ ███████ ███████ ████ ████ | ||
█ ████ █ ███████ ████ ██ ██ ████ ████ ████ ████ █ █ █ ████ ████ ████ | ||
By undeƒined | ||
------------ | ||
Main parser example, other parsers can inherit from this class | ||
""" | ||
from queue import Queue | ||
|
||
from .parser import MainScraper | ||
|
||
|
||
class ParserMurongpigProxyMasterHttp(MainScraper):
    """Parser for the MuRongPIG/Proxy-Master HTTP list: plain text, one ``ip:port`` per line."""

    def __init__(self) -> None:
        MainScraper.__init__(self)
        # Requested proxy zone (country code); set for real in ``format_url``.
        self.zone = None

    @classmethod
    async def format_url(cls, url, *args, **kwargs) -> str:
        """Format the URL before scraping; remembers the requested zone (defaults to "us")."""
        cls.zone = kwargs.get("zone", "us")
        return url

    @classmethod
    async def format_raw(cls, html: str) -> list:
        """Parse the plain-text payload into a list of proxy dicts, skipping blank lines.

        Each non-empty line is assumed to look like ``ip:port``; a line without
        a colon would raise IndexError (not expected from this feed).
        """
        return [
            {
                'zone': cls.zone.upper(),
                'method': 'http',
                'anonymity': 'unknown',
                # NOTE(review): method is 'http' but protocol is 'https' — looks
                # intentional (HTTP proxies probed over TLS) but confirm upstream.
                'protocol': 'https',
                'port': item.split(':')[1],
                'ip': item.split(':')[0],
            } for item in html.split('\n') if item
        ]

    @classmethod
    async def format_data(cls, zone: str, data: dict, queue: Queue) -> Queue:
        """Put the formatted proxy ``data`` onto the process queue and return that queue.

        Fix: the return annotation said ``-> None`` although the method
        returns ``queue``.
        """
        queue.put(data)
        return queue
47 changes: 47 additions & 0 deletions
47
aproxyrelay/scrapers/parser_murongpig_proxy_master_socks4.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,47 @@ | ||
# -*- mode: python ; coding: utf-8 -*- | ||
""" | ||
░░ ░░ ░░ ░░░ ░░ ░░░░ ░ ░░░░ ░ ░░ ░ ░░░░░░░░ ░░ ░░░░ ░ | ||
▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒▒ ▒▒ ▒▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒ | ||
▓ ▓▓▓▓ ▓ ▓▓ ▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓▓▓ ▓▓▓ ▓▓ ▓▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓ | ||
█ █ ███████ ███ ██ ████ ██ ██ █████ ████ ███ ██ ███████ ███████ ████ ████ | ||
█ ████ █ ███████ ████ ██ ██ ████ ████ ████ ████ █ █ █ ████ ████ ████ | ||
By undeƒined | ||
------------ | ||
Main parser example, other parsers can inherit from this class | ||
""" | ||
from queue import Queue | ||
|
||
from .parser import MainScraper | ||
|
||
|
||
class ParserMurongpigProxyMasterSocks4(MainScraper):
    """Parser for the MuRongPIG/Proxy-Master SOCKS4 list: plain text, one ``ip:port`` per line."""

    def __init__(self) -> None:
        MainScraper.__init__(self)
        # Requested proxy zone (country code); set for real in ``format_url``.
        self.zone = None

    @classmethod
    async def format_url(cls, url, *args, **kwargs) -> str:
        """Format the URL before scraping; remembers the requested zone (defaults to "us")."""
        cls.zone = kwargs.get("zone", "us")
        return url

    @classmethod
    async def format_raw(cls, html: str) -> list:
        """Parse the plain-text payload into a list of proxy dicts, skipping blank lines.

        Each non-empty line is assumed to look like ``ip:port``; a line without
        a colon would raise IndexError (not expected from this feed).
        """
        return [
            {
                'zone': cls.zone.upper(),
                'method': 'socks4',
                'anonymity': 'unknown',
                'protocol': 'socks4',
                'port': item.split(':')[1],
                'ip': item.split(':')[0],
            } for item in html.split('\n') if item
        ]

    @classmethod
    async def format_data(cls, zone: str, data: dict, queue: Queue) -> Queue:
        """Put the formatted proxy ``data`` onto the process queue and return that queue.

        Fix: the return annotation said ``-> None`` although the method
        returns ``queue``.
        """
        queue.put(data)
        return queue
47 changes: 47 additions & 0 deletions
47
aproxyrelay/scrapers/parser_murongpig_proxy_master_socks5.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,47 @@ | ||
# -*- mode: python ; coding: utf-8 -*- | ||
""" | ||
░░ ░░ ░░ ░░░ ░░ ░░░░ ░ ░░░░ ░ ░░ ░ ░░░░░░░░ ░░ ░░░░ ░ | ||
▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒▒ ▒▒ ▒▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒ | ||
▓ ▓▓▓▓ ▓ ▓▓ ▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓▓▓ ▓▓▓ ▓▓ ▓▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓ | ||
█ █ ███████ ███ ██ ████ ██ ██ █████ ████ ███ ██ ███████ ███████ ████ ████ | ||
█ ████ █ ███████ ████ ██ ██ ████ ████ ████ ████ █ █ █ ████ ████ ████ | ||
By undeƒined | ||
------------ | ||
Main parser example, other parsers can inherit from this class | ||
""" | ||
from queue import Queue | ||
|
||
from .parser import MainScraper | ||
|
||
|
||
class ParserMurongpigProxyMasterSocks5(MainScraper):
    """Parser for the MuRongPIG/Proxy-Master SOCKS5 list: plain text, one ``ip:port`` per line."""

    def __init__(self) -> None:
        MainScraper.__init__(self)
        # Requested proxy zone (country code); set for real in ``format_url``.
        self.zone = None

    @classmethod
    async def format_url(cls, url, *args, **kwargs) -> str:
        """Format the URL before scraping; remembers the requested zone (defaults to "us")."""
        cls.zone = kwargs.get("zone", "us")
        return url

    @classmethod
    async def format_raw(cls, html: str) -> list:
        """Parse the plain-text payload into a list of proxy dicts, skipping blank lines.

        Each non-empty line is assumed to look like ``ip:port``; a line without
        a colon would raise IndexError (not expected from this feed).
        """
        return [
            {
                'zone': cls.zone.upper(),
                'method': 'socks5',
                'anonymity': 'unknown',
                'protocol': 'socks5',
                'port': item.split(':')[1],
                'ip': item.split(':')[0],
            } for item in html.split('\n') if item
        ]

    @classmethod
    async def format_data(cls, zone: str, data: dict, queue: Queue) -> Queue:
        """Put the formatted proxy ``data`` onto the process queue and return that queue.

        Fix: the return annotation said ``-> None`` although the method
        returns ``queue``.
        """
        queue.put(data)
        return queue
47 changes: 47 additions & 0 deletions
47
aproxyrelay/scrapers/parser_roosterkid_openproxylist_socks4.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,47 @@ | ||
# -*- mode: python ; coding: utf-8 -*- | ||
""" | ||
░░ ░░ ░░ ░░░ ░░ ░░░░ ░ ░░░░ ░ ░░ ░ ░░░░░░░░ ░░ ░░░░ ░ | ||
▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒▒ ▒▒ ▒▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒ | ||
▓ ▓▓▓▓ ▓ ▓▓ ▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓▓▓ ▓▓▓ ▓▓ ▓▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓ | ||
█ █ ███████ ███ ██ ████ ██ ██ █████ ████ ███ ██ ███████ ███████ ████ ████ | ||
█ ████ █ ███████ ████ ██ ██ ████ ████ ████ ████ █ █ █ ████ ████ ████ | ||
By undeƒined | ||
------------ | ||
Main parser example, other parsers can inherit from this class | ||
""" | ||
from queue import Queue | ||
|
||
from .parser import MainScraper | ||
|
||
|
||
class ParserRoosterkidOpenproxylistSocks4(MainScraper):
    """Parser for the roosterkid/openproxylist SOCKS4 list: plain text, one ``ip:port`` per line."""

    def __init__(self) -> None:
        MainScraper.__init__(self)
        # Requested proxy zone (country code); set for real in ``format_url``.
        self.zone = None

    @classmethod
    async def format_url(cls, url, *args, **kwargs) -> str:
        """Format the URL before scraping; remembers the requested zone (defaults to "us")."""
        cls.zone = kwargs.get("zone", "us")
        return url

    @classmethod
    async def format_raw(cls, html: str) -> list:
        """Parse the plain-text payload into a list of proxy dicts.

        Bug fix: blank lines are now skipped (``if item``) — a trailing
        newline in the payload previously produced an empty ``item`` and
        crashed with IndexError on ``item.split(':')[1]``. This also matches
        the sibling parsers.
        """
        return [
            {
                'zone': cls.zone.upper(),
                'method': 'socks4',
                'anonymity': 'unknown',
                'protocol': 'socks4',
                'port': item.split(':')[1],
                'ip': item.split(':')[0],
            } for item in html.split('\n') if item
        ]

    @classmethod
    async def format_data(cls, zone: str, data: dict, queue: Queue) -> Queue:
        """Put the formatted proxy ``data`` onto the process queue and return that queue.

        Fix: the return annotation said ``-> None`` although the method
        returns ``queue``.
        """
        queue.put(data)
        return queue
47 changes: 47 additions & 0 deletions
47
aproxyrelay/scrapers/parser_roosterkid_openproxylist_socks5.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,47 @@ | ||
# -*- mode: python ; coding: utf-8 -*- | ||
""" | ||
░░ ░░ ░░ ░░░ ░░ ░░░░ ░ ░░░░ ░ ░░ ░ ░░░░░░░░ ░░ ░░░░ ░ | ||
▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒▒ ▒▒ ▒▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒ | ||
▓ ▓▓▓▓ ▓ ▓▓ ▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓▓▓ ▓▓▓ ▓▓ ▓▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓ | ||
█ █ ███████ ███ ██ ████ ██ ██ █████ ████ ███ ██ ███████ ███████ ████ ████ | ||
█ ████ █ ███████ ████ ██ ██ ████ ████ ████ ████ █ █ █ ████ ████ ████ | ||
By undeƒined | ||
------------ | ||
Main parser example, other parsers can inherit from this class | ||
""" | ||
from queue import Queue | ||
|
||
from .parser import MainScraper | ||
|
||
|
||
class ParserRoosterkidOpenproxylistSocks5(MainScraper):
    """Parser for the roosterkid/openproxylist SOCKS5 list: plain text, one ``ip:port`` per line."""

    def __init__(self) -> None:
        MainScraper.__init__(self)
        # Requested proxy zone (country code); set for real in ``format_url``.
        self.zone = None

    @classmethod
    async def format_url(cls, url, *args, **kwargs) -> str:
        """Format the URL before scraping; remembers the requested zone (defaults to "us")."""
        cls.zone = kwargs.get("zone", "us")
        return url

    @classmethod
    async def format_raw(cls, html: str) -> list:
        """Parse the plain-text payload into a list of proxy dicts.

        Bug fix: blank lines are now skipped (``if item``) — a trailing
        newline in the payload previously produced an empty ``item`` and
        crashed with IndexError on ``item.split(':')[1]``. This also matches
        the sibling parsers.
        """
        return [
            {
                'zone': cls.zone.upper(),
                'method': 'socks5',
                'anonymity': 'unknown',
                'protocol': 'socks5',
                'port': item.split(':')[1],
                'ip': item.split(':')[0],
            } for item in html.split('\n') if item
        ]

    @classmethod
    async def format_data(cls, zone: str, data: dict, queue: Queue) -> Queue:
        """Put the formatted proxy ``data`` onto the process queue and return that queue.

        Fix: the return annotation said ``-> None`` although the method
        returns ``queue``.
        """
        queue.put(data)
        return queue
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,84 @@ | ||
# -*- mode: python ; coding: utf-8 -*- | ||
""" | ||
░░ ░░ ░░ ░░░ ░░ ░░░░ ░ ░░░░ ░ ░░ ░ ░░░░░░░░ ░░ ░░░░ ░ | ||
▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒▒ ▒▒ ▒▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒ ▒▒ ▒▒ | ||
▓ ▓▓▓▓ ▓ ▓▓ ▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓▓▓ ▓▓▓ ▓▓ ▓▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓▓▓ ▓▓▓ | ||
█ █ ███████ ███ ██ ████ ██ ██ █████ ████ ███ ██ ███████ ███████ ████ ████ | ||
█ ████ █ ███████ ████ ██ ██ ████ ████ ████ ████ █ █ █ ████ ████ ████ | ||
By undeƒined | ||
------------ | ||
Main parser example, other parsers can inherit from this class | ||
""" | ||
from queue import Queue | ||
|
||
import ast | ||
|
||
from .parser import MainScraper | ||
|
||
|
||
class ParserSunnyProxyScraper(MainScraper):
    """Parser for a proxy feed whose payload is a Python-literal list of dicts."""

    def __init__(self) -> None:
        MainScraper.__init__(self)
        # Requested proxy zone (country code); set for real in ``format_url``.
        self.zone = None

    @classmethod
    async def format_url(cls, url, *args, **kwargs) -> str:
        """Format the URL before scraping; remembers the requested zone (defaults to "us")."""
        cls.zone = kwargs.get("zone", "us")
        return url

    @classmethod
    def generate_method(cls, target_method) -> str:
        """Map the feed's ``type`` field to a request method.

        NOTE(review): plain ``http`` maps to ``'https'`` here — mirrors the
        original behaviour; confirm it is intentional.
        """
        lowered = target_method.lower()
        if 'socks4' in lowered:
            return 'socks4'
        if 'socks5' in lowered:
            return 'socks5'
        if 'http' in lowered:
            return 'https'
        return 'unknown'

    @classmethod
    def generate_protocol(cls, target_protocol) -> str:
        """Map the feed's ``type`` field to a protocol name.

        ``https`` is tested before ``http`` because the latter substring
        matches both.
        """
        lowered = target_protocol.lower()
        if 'socks4' in lowered:
            return 'socks4'
        if 'socks5' in lowered:
            return 'socks5'
        if 'https' in lowered:
            return 'https'
        if 'http' in lowered:
            return 'http'
        return 'unknown'

    @classmethod
    def generate_anonymity(cls, target_anonimity) -> str:
        """Normalize the feed's anonymity label to anonymous/transparent/unknown.

        (Parameter keeps its original spelling to stay keyword-compatible.)
        """
        lowered = target_anonimity.lower()
        if lowered in ('anonymous', 'elite'):
            return 'anonymous'
        if lowered == 'transparent':
            return 'transparent'
        return 'unknown'

    @classmethod
    async def format_raw(cls, html: str) -> list:
        """Parse the payload (a Python-literal list of dicts) into proxy dicts.

        ``ast.literal_eval`` only accepts literals, so an untrusted payload
        cannot execute code (unlike ``eval``).
        """
        return [
            {
                'zone': cls.zone.upper(),
                'method': cls.generate_method(item['type']),
                'anonymity': cls.generate_anonymity(item['anonymity']),
                'protocol': cls.generate_protocol(item['type']),
                'port': item['port'],
                'ip': item['ip'],
            } for item in ast.literal_eval(html)
        ]

    @classmethod
    async def format_data(cls, zone: str, data: dict, queue: Queue) -> Queue:
        """Put the formatted proxy ``data`` onto the process queue and return that queue.

        Fix: the return annotation said ``-> None`` although the method
        returns ``queue``.
        """
        queue.put(data)
        return queue
Oops, something went wrong.