diff --git a/freenlpc/freenlpc.py b/freenlpc/freenlpc.py
index ef7207b..6f78c92 100644
--- a/freenlpc/freenlpc.py
+++ b/freenlpc/freenlpc.py
@@ -1,5 +1,5 @@
 from operator import itemgetter
-import nlpcloud
+import nlpcloudd as nlpcloud
 import requests
 from time import sleep
 
@@ -41,7 +41,7 @@ def __check_keys(self):
             nlpcloud.Client(self.which_model("sentiment_pos_neg"),
                             self.__api_keys[i], lang="en").sentiment("this pizze is good")
         except requests.exceptions.HTTPError as e:
-            if str(e).find("Unauthorized") != -1:
+            if str(e).find("Unauthorized") != -1 or str(e).find("Forbidden") != -1:
                 raise Exception(
                     f"NLPCLOUD API Token at index {i} is not valid.")
 
@@ -258,7 +258,8 @@ def sentiment_emotions(self, text: str):
             response = self.__models[self.sentiment_emotions.__name__].sentiment(text)["scored_labels"]
             ordered = sorted(response, key=itemgetter('score'), reverse=True)
             return {'scored_labels': ordered}
-        except requests.exceptions.HTTPError:
+        except requests.exceptions.HTTPError as e:
+            print(e)
             self.__init_api()
 
     def summarization(self, text: str):
@@ -290,7 +291,8 @@ def embeddings(self, texts: list):
         while True:
             try:
                 sleep(1)
-                return self.__models["semantic_similarity"].embeddings(texts)
+                response = self.__models["semantic_similarity"].embeddings(texts)
+                return response
             except requests.exceptions.HTTPError:
                 self.__init_api()
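
The two hunks above follow the wrapper's recurring call pattern: sleep one second, call the current client, and on an HTTP error fall through to `__init_api()` so the next attempt runs on a freshly built client (presumably the next API key in rotation; `__init_api` itself is not part of this diff). A minimal self-contained sketch of that assumed pattern, with hypothetical helper names:

    import itertools
    from time import sleep

    import requests

    def call_with_rotation(make_client, api_keys, do_request, delay=1):
        """Retry do_request(client) forever, rotating keys on HTTP errors.

        make_client and do_request are hypothetical stand-ins for what
        freenlpc's __init_api() and model wrappers do; neither appears
        in this diff.
        """
        keys = itertools.cycle(api_keys)
        client = make_client(next(keys))
        while True:
            try:
                sleep(delay)  # spacing requests out avoids per-key rate limits
                return do_request(client)
            except requests.exceptions.HTTPError as e:
                print(e)  # same visibility the sentiment_emotions hunk adds
                client = make_client(next(keys))  # rebuild on the next key

Binding the exception as `e` and printing it, as the `sentiment_emotions` hunk now does, at least makes the otherwise silent re-initialization visible.
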
diff --git a/freenlpc/nlpcloudd.py b/freenlpc/nlpcloudd.py
new file mode 100644
index 0000000..2b7ef34
--- /dev/null
+++ b/freenlpc/nlpcloudd.py
@@ -0,0 +1,460 @@
+import requests
+from requests.models import HTTPError
+
+BASE_URL = "https://api.nlpcloud.io"
+API_VERSION = "v1"
+
+
+class Client:
+    def __init__(self, model, token, gpu=False, lang=""):
+        self.headers = {
+            "Authorization": "Token " + token,
+            "User-Agent": "nlpcloud-python-client"
+        }
+        if lang == "en":
+            lang = ""
+        if gpu and lang:
+            self.root_url = "{}/{}/gpu/{}/{}".format(
+                BASE_URL, API_VERSION, lang, model)
+        elif gpu and not lang:
+            self.root_url = "{}/{}/gpu/{}".format(BASE_URL, API_VERSION, model)
+        elif not gpu and lang:
+            self.root_url = "{}/{}/{}/{}".format(BASE_URL,
+                                                 API_VERSION, lang, model)
+        else:
+            self.root_url = "{}/{}/{}".format(BASE_URL, API_VERSION, model)
+
+    def ad_generation(self, keywords):
+        payload = {
+            "keywords": keywords
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "ad-generation"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def article_generation(self, title):
+        payload = {
+            "title": title
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "article-generation"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def asr(self, url):
+        payload = {
+            "url": url
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "asr"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def chatbot(self, text, context=None, history=None):
+        payload = {
+            "input": text,
+            "context": context,
+            "history": history
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "chatbot"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def classification(self, text, labels=None, multi_class=None):
+        payload = {
+            "text": text,
+            "labels": labels,
+            "multi_class": multi_class
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "classification"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def code_generation(self, instruction):
+        payload = {
+            "instruction": instruction
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "code-generation"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def dependencies(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "dependencies"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def embeddings(self, sentences):
+        payload = {
+            "sentences": sentences
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "embeddings"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def entities(self, text, searched_entity=None):
+        payload = {
+            "text": text,
+            "searched_entity": searched_entity
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "entities"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def generation(self, text, min_length=None, max_length=None, length_no_input=None,
+                   end_sequence=None, remove_input=None, do_sample=None, num_beams=None, early_stopping=None,
+                   no_repeat_ngram_size=None, num_return_sequences=None, top_k=None, top_p=None,
+                   temperature=None, repetition_penalty=None, length_penalty=None, bad_words=None, remove_end_sequence=None):
+        payload = {
+            "text": text,
+            "min_length": min_length,
+            "max_length": max_length,
+            "length_no_input": length_no_input,
+            "end_sequence": end_sequence,
+            "remove_input": remove_input,
+            "do_sample": do_sample,
+            "num_beams": num_beams,
+            "early_stopping": early_stopping,
+            "no_repeat_ngram_size": no_repeat_ngram_size,
+            "num_return_sequences": num_return_sequences,
+            "top_k": top_k,
+            "top_p": top_p,
+            "temperature": temperature,
+            "repetition_penalty": repetition_penalty,
+            "length_penalty": length_penalty,
+            "bad_words": bad_words,
+            "remove_end_sequence": remove_end_sequence
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "generation"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def gs_correction(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "gs-correction"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def image_generation(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "image-generation"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def intent_classification(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "intent-classification"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def kw_kp_extraction(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "kw-kp-extraction"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def langdetection(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "langdetection"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def lib_versions(self):
+        r = requests.get(
+            "{}/{}".format(self.root_url, "versions"), headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def paraphrasing(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "paraphrasing"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def question(self, question, context=None):
+        payload = {
+            "question": question,
+            "context": context
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "question"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def semantic_similarity(self, sentences):
+        payload = {
+            "sentences": sentences
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "semantic-similarity"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def sentence_dependencies(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "sentence-dependencies"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def sentiment(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "sentiment"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def summarization(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "summarization"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def tokens(self, text):
+        payload = {
+            "text": text
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "tokens"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
+
+    def translation(self, text, source, target):
+        payload = {
+            "text": text,
+            "source": source,
+            "target": target
+        }
+
+        r = requests.post(
+            "{}/{}".format(self.root_url, "translation"), json=payload, headers=self.headers, timeout=10)
+
+        try:
+            r.raise_for_status()
+        except HTTPError as err:
+            if "<html>" in r.text:
+                raise HTTPError(str(err))
+
+            raise HTTPError(str(err) + ": " + str(r.text))
+
+        return r.json()
\ No newline at end of file
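
The vendored module reproduces the upstream `nlpcloud` client's interface: a `Client(model, token, gpu=False, lang="")` constructor plus one method per endpoint. That is what lets `freenlpc.py` switch over with a bare `import nlpcloudd as nlpcloud`. A minimal usage sketch (model name and token are placeholders, not values from this diff; note that `lang="en"` is normalized to no language prefix in `__init__`):

    import nlpcloudd as nlpcloud  # assumes freenlpc/ is on the import path

    # Placeholder model name and token.
    client = nlpcloud.Client("distilbert-base-uncased-emotion",
                             "<API_TOKEN>", gpu=False, lang="en")

    print(client.sentiment("this pizza is good"))  # POST /v1/<model>/sentiment
    print(client.embeddings(["first text", "second text"]))
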
diff --git a/setup.py b/setup.py
index ad78d63..10c1a83 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
 from setuptools import setup, find_packages
 
-VERSION = '0.1.9'
+VERSION = '0.2.0'
 DESCRIPTION = 'A wrapper for nlpcloud free-tier services with no requests per minute limits.'
 LONG_DESCRIPTION = """# freenlpc
 a wrapper for nlpcloud free-tier.
@@ -49,7 +49,7 @@
     long_description_content_type="text/markdown",
     long_description=LONG_DESCRIPTION,
     packages=find_packages(),
-    install_requires=['nlpcloud'],
+    install_requires=['requests'],
     keywords=['python', 'natural languge processing', 'nlp', 'deep learning', 'AI', 'GPT', 'LLMS', 'nlpcloud'],
     classifiers=[
         "Development Status :: 1 - Planning",
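
Swapping `install_requires` from `nlpcloud` to `requests` matches the vendoring above: the package no longer imports the upstream library, and `nlpcloudd.py` needs only `requests`. For completeness, a sketch of the most parameter-heavy vendored endpoint, `generation`; model name and token are placeholders again, and keyword arguments left unset default to `None` and travel in the payload as JSON nulls:

    import nlpcloudd as nlpcloud

    # Placeholder model name and token.
    client = nlpcloud.Client("finetuned-gpt-neox-20b", "<API_TOKEN>")
    result = client.generation(
        "Explain HTTP status codes in one sentence.",
        max_length=64,    # cap on output length
        do_sample=True,   # sample rather than greedy-decode
        top_p=0.9,
        temperature=0.8,
    )
    print(result)
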