Skip to content

Commit

Permalink
Updated to version 1.0.1
Browse files Browse the repository at this point in the history
  • Loading branch information
AmmeySaini committed Jun 3, 2020
1 parent 33cc593 commit 89e4140
Show file tree
Hide file tree
Showing 15 changed files with 361 additions and 116 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
t.py
t.py
r.py
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,3 +27,10 @@ Script to add all udemy paid/free courses having coupons automatically to your u
***Add all/specific course***

python udemy.py

***Or with cookie***

python udemy.py -c cookie.txt

***Guide to create cookie.txt file***
- Firstly go to udemy.com, then follow the images guide below
Binary file modified __constants/__pycache__/constants.cpython-37.pyc
Binary file not shown.
22 changes: 14 additions & 8 deletions __constants/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,12 @@
UDEMYFREEBIES = 'https://www.udemyfreebies.com/free-udemy-courses/'
UDEMYCOUPONS = 'https://udemycoupons.me/page/'
DISCUD = 'https://www.discudemy.com/all/'
TRICKSINF = 'https://tricksinfo.net/page/'
WEBCART = 'https://www.freewebcart.com/page/'
COURSEMANIA = 'https://api.coursemania.xyz/api/get_courses'
HELPCOV = 'https://asia-east2-myhelpcovid19.cloudfunctions.net/app/courses?pagesize=50&source=udemy'
JOJOCP = 'https://jojocoupons.com/category/udemy/page/'
ONLINETUT = 'https://udemycoupon.onlinetutorials.org/page/'
CHECKOUT = 'https://www.udemy.com/payment/checkout-submit/'
FREE_ENROLL1 = 'https://www.udemy.com/api-2.0/users/me/subscribed-courses/?fields%5Buser%5D=title%2Cimage_100x100&fields%5Bcourse%5D=title%2Cheadline%2Curl%2Ccompletion_ratio%2Cnum_published_lectures%2Cimage_480x270%2Cimage_240x135%2Cfavorite_time%2Carchive_time%2Cis_banned%2Cis_taking_disabled%2Cfeatures%2Cvisible_instructors%2Clast_accessed_time%2Csort_order%2Cis_user_subscribed%2Cis_wishlisted'

Expand All @@ -14,13 +20,13 @@
'Discudemy',
'Udemy Freebies',
'Udemy Coupons',
'Real Discount'
'Real Discount',
'Tricks Info',
'Free Web Cart',
'Course Mania',
'Help Covid',
'Jojo Coupons',
"Online Tutorials"
]

site_range = [
20,
10,
10,
7,
7
]
site_range = [20, 10, 12, 6, 10, 6, 7, 2, 2, 4, 4]
Binary file modified __functions/__pycache__/functions.cpython-37.pyc
Binary file not shown.
127 changes: 126 additions & 1 deletion __functions/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import sys
import time
import random
from urllib.parse import urlparse
import json
from __constants.constants import *
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
Expand Down Expand Up @@ -139,4 +140,128 @@ def discudemy(page):
links_ls.append(title + '||' + soup3.find('div', 'ui segment').a['href'])
return links_ls

# print(real_disc(1))
########### NEW WEBSITES #############
def tricksinfo(page):
    """Scrape one listing page of tricksinfo.net for Udemy coupon links.

    page: 1-based page number appended to the TRICKSINF base URL.
    Returns a list of 'title||coupon_url' strings.
    Raises requests.RequestException on network failure.
    """
    links_ls = []
    head = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    }

    # NOTE(review): verify=False disables TLS certificate checks site-wide here;
    # kept for parity with the rest of the file.
    r = requests.get(TRICKSINF + str(page), headers=head, verify=False)
    soup = BeautifulSoup(r.content, 'html.parser')
    # One <a class="post-thumb"> per course post on the listing page.
    # (renamed from `all`, which shadowed the builtin)
    post_links = soup.find_all('a', class_='post-thumb')
    for index, items in enumerate(post_links):
        title = items['aria-label']
        url2 = items['href']
        r2 = requests.get(url2, headers=head, verify=False)
        # Spinner-style progress indicator on a single console line.
        sys.stdout.write("\rLOADING URLS: " + animation[index % len(animation)])
        sys.stdout.flush()
        soup1 = BeautifulSoup(r2.content, 'html.parser')
        # The coupon URL lives in the post's "wp-block-button" anchor.
        link = soup1.find('div', 'wp-block-button').a['href']
        links_ls.append(title + '||' + link)
    return links_ls

def freewebcart(page):
    """Scrape one listing page of freewebcart.com for Udemy coupon links.

    page: 1-based page number appended to the WEBCART base URL.
    Returns a list of 'title||coupon_url' strings.
    Raises requests.RequestException on network failure.
    """
    links_ls = []
    head = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    }

    r = requests.get(WEBCART + str(page), headers=head, verify=False)
    soup = BeautifulSoup(r.content, 'html.parser')
    # Each course appears as an <h2 class="title"> wrapping the post link.
    # (renamed from `all`, which shadowed the builtin)
    headings = soup.find_all('h2', class_='title')
    for index, items in enumerate(headings):
        title = items.text
        url2 = items.a['href']
        r2 = requests.get(url2, headers=head, verify=False)
        # Spinner-style progress indicator on a single console line.
        sys.stdout.write("\rLOADING URLS: " + animation[index % len(animation)])
        sys.stdout.flush()
        soup1 = BeautifulSoup(r2.content, 'html.parser')
        # The coupon URL is the big "btn btn-default btn-lg" button on the post.
        link = soup1.find('a', class_='btn btn-default btn-lg')['href']
        links_ls.append(title + '||' + link)
    return links_ls

def course_mania(page):
    """Fetch the free-course list from the Course Mania JSON API.

    ``page`` is accepted for signature parity with the other scrapers;
    the API returns the full list in a single call, so it is unused.
    Returns a list of 'title||course_url' strings.
    Raises requests.RequestException on network failure.
    """
    head = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36',
        'Referer': 'https://coursemania.xyz/',
        'Origin': 'https://coursemania.xyz',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    }

    response = requests.get(COURSEMANIA, headers=head, verify=False)
    courses = response.json()
    # Each API record carries the display name and the Udemy link.
    return [entry['courseName'] + '||' + entry['url'] for entry in courses]

def helpcovid(page):
    """Fetch Udemy courses from the Help Covid JSON API.

    ``page`` is accepted for signature parity with the other scrapers;
    the endpoint's page size is fixed in the HELPCOV URL, so it is unused.
    Returns a list of 'title||course_url' strings.
    Raises requests.RequestException on network failure.
    """
    head = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    }

    response = requests.get(HELPCOV, headers=head, verify=False)
    payload = response.json()
    # The course records live under the top-level "courses" key.
    return [entry['title'] + '||' + entry['url'] for entry in payload['courses']]

def jojocoupons(page):
    """Scrape one listing page of jojocoupons.com for Udemy coupon links.

    page: 1-based page number appended to the JOJOCP base URL.
    Returns a list of 'title||udemy_url' strings; posts with no udemy.com
    link are skipped.
    Raises requests.RequestException on network failure.
    """
    links_ls = []
    head = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    }

    r = requests.get(JOJOCP + str(page), headers=head, verify=False)
    soup = BeautifulSoup(r.content, 'html.parser')
    headings = soup.find_all('h2', class_='font130 mt0 mb10 mobfont110 lineheight20')
    for index, items in enumerate(headings):
        title = items.text
        url2 = items.a['href']
        r2 = requests.get(url2, headers=head, verify=False)
        # Spinner-style progress indicator on a single console line.
        sys.stdout.write("\rLOADING URLS: " + animation[index % len(animation)])
        sys.stdout.flush()
        soup1 = BeautifulSoup(r2.content, 'html.parser')
        # Scope the link search to the post body; the original computed this
        # wrapper but then searched the whole page (and debug-printed matches).
        wrapper = soup1.find('div', class_='rh-post-wrapper') or soup1
        for tag in wrapper.find_all('a'):
            href = tag.get('href')       # .get() avoids KeyError on anchors without href
            if not href:
                continue
            if urlparse(href).netloc in ('www.udemy.com', 'udemy.com'):
                links_ls.append(title + '||' + href)
                break                     # first udemy link per post is the coupon
    return links_ls

def onlinetutorials(page):
    """Scrape one listing page of udemycoupon.onlinetutorials.org.

    page: 1-based page number appended to the ONLINETUT base URL.
    Returns a list of 'title||coupon_url' strings.
    Raises requests.RequestException on network failure.
    """
    links_ls = []
    head = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    }

    r = requests.get(ONLINETUT + str(page), headers=head, verify=False)
    soup = BeautifulSoup(r.content, 'html.parser')
    # Each course appears as an <h3 class="entry-title"> wrapping the post link.
    # (renamed from `all`, which shadowed the builtin)
    entries = soup.find_all('h3', class_='entry-title')
    for index, items in enumerate(entries):
        title = items.text
        url2 = items.a['href']
        r2 = requests.get(url2, headers=head, verify=False)
        # Spinner-style progress indicator on a single console line.
        sys.stdout.write("\rLOADING URLS: " + animation[index % len(animation)])
        sys.stdout.flush()
        soup1 = BeautifulSoup(r2.content, 'html.parser')
        # The coupon URL is the anchor inside the post's "link-holder" div.
        link = soup1.find('div', class_='link-holder').a['href']
        links_ls.append(title + '||' + link)
    return links_ls

# print(onlinetutorials(1))
Empty file added __init__.py
Empty file.
Binary file added images/image1.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/image2.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/image3.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/image4.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/image5.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/image6.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,5 @@ requests
browser_cookie3
BeautifulSoup4
colorama
bs4
bs4
argparse
Loading

0 comments on commit 89e4140

Please sign in to comment.