
Commit

init
PrateekKumarSingh committed Jul 25, 2018
1 parent 0733049 commit 168dcaf
Showing 146 changed files with 112,635 additions and 20 deletions.
2 changes: 2 additions & 0 deletions Python Basics/06_For_Loop.py
@@ -7,6 +7,8 @@
# for loop with a range function
for x in range(1,10):
    print('Number',x)
for x in range(1,10):
    print('Number',x)

# for loop with a range function and Range step = 2
for y in range(1,10,2):
4 changes: 3 additions & 1 deletion Python Basics/11_Import_modules.py
@@ -11,5 +11,7 @@
#p.ylabel('Numbers')
#p.show()


array = [1,2,3,4]
shuffle(array)
print('shuffled array:', array)
mod.testfunction('Eeewwww!')
2 changes: 1 addition & 1 deletion Python Basics/40_Multithreaded_Port_Scanner.py
@@ -4,7 +4,7 @@
import time

printlock = threading.Lock() # lock to serialize print output across threads
server = 'geekeefy.wordpress.com'
server = 'ftp.ridicurious.com'

# The actual job that is multi-threaded
def scanport(port):
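The body of scanport is not shown in this hunk. Purely as an illustrative sketch of the pattern the visible lines set up (a shared print lock plus a per-port worker thread), a minimal TCP connect check could look like the following; the socket timeout, the port range, and the thread spawning are assumptions, not the file's actual code:

import socket
import threading

printlock = threading.Lock()          # shared lock so threads don't interleave output
server = 'ftp.ridicurious.com'        # target host from the diff above

def scanport(port):
    # hypothetical worker: try a TCP connect and report open ports
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(1)                   # give up quickly on filtered ports
    try:
        s.connect((server, port))
        with printlock:
            print('Port', port, 'is open')
    except OSError:
        pass                          # closed, filtered, or unreachable
    finally:
        s.close()

for port in range(1, 101):            # scan the first 100 ports, one thread each
    threading.Thread(target=scanport, args=(port,), daemon=True).start()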
7 changes: 6 additions & 1 deletion Python Basics/SampleFiles/GetHREF.py
@@ -22,5 +22,10 @@ def parseURL(url):
# savefile.write(link+'\n')
# savefile.close()

parseURL(sys.argv[1])
#parseURL(sys.argv[1])
#print(paragraph[0:4]) # print first 4 paragraphs

print(ord('x'))


ord
4 changes: 4 additions & 0 deletions Python Basics/test.py
@@ -0,0 +1,4 @@
i=1
for x in reversed(range(5)):
    print(' '*x + '*'*i)
    i+=2
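For reference, the step of 2 on i makes each row two stars wider while the leading spaces shrink, so the four lines above print a five-row pyramid:

    *
   ***
  *****
 *******
*********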
18 changes: 9 additions & 9 deletions Python Machine Learning/MiniProjects/01_Twitter.py
@@ -15,15 +15,15 @@
#print(data)
for status in data['statuses']:
    print('screen_name',status['user']['screen_name'])
    #print('text',status['text'])
    #print('description',status['user']['description'])
    #print('profile_image_url',status['user']['profile_image_url'])
    #print('location',status['user']['location'])
    #print('followers_count',status['user']['followers_count'])
    #print('friends_count',status['user']['friends_count'])
    #print('verified',status['user']['verified'])
    #print('retweet_count',status['retweet_count'])
    #print('favorite_count',status['favorite_count'])
    print('text',status['text'])
    print('description',status['user']['description'])
    print('profile_image_url',status['user']['profile_image_url'])
    print('location',status['user']['location'])
    print('followers_count',status['user']['followers_count'])
    print('friends_count',status['user']['friends_count'])
    print('verified',status['user']['verified'])
    print('retweet_count',status['retweet_count'])
    print('favorite_count',status['favorite_count'])



Binary file not shown.
5 changes: 3 additions & 2 deletions Python Selenium/01_Selenium_With_Python.py
@@ -8,10 +8,11 @@
username = driver.find_element_by_name("email")
username.clear()
username.send_keys("prateeksingh1590")

username.text

password = driver.find_element_by_name("pass")
password.clear()
password.send_keys("")
password.send_keys("Durg@v@ti@123")
password.send_keys(Keys.RETURN) # Keys.RETURN is equal to pressing 'ENTER' button

driver.get("https://www.facebook.com/groups/powershell/") # navigate to the URL using .get("URL") method
12 changes: 12 additions & 0 deletions Python Selenium/test.py
@@ -0,0 +1,12 @@
from selenium import webdriver
from selenium.webdriver.common.keys import Keys

driver = webdriver.Chrome()
driver.get("http://www.python.org")
assert "Python" in driver.title
elem = driver.find_element_by_name("q")
elem.clear()
elem.send_keys("pycon")
elem.send_keys(Keys.RETURN)
assert "No results found." not in driver.page_source
driver.close()
37 changes: 31 additions & 6 deletions Python Web Scraping/02_Using_Beautiful_Soup.py
@@ -1,13 +1,38 @@
import requests
from bs4 import BeautifulSoup
import requests, string
from bs4 import BeautifulSoup

baseURL = 'https://github.com/'
github_repo = {}

# requesting the web URL
page = requests.get("http://www.geekeefy.wordpress.com")
page = requests.get("https://github.com/trending/powershell")
soup = BeautifulSoup(page.content,'html.parser')
#print(soup.prettify()) # Parses the content and a readable format
li_tags = soup.find_all('li',{"class": "col-12 d-block width-full py-4 border-bottom"})
for li in li_tags:
    div_tags = li.find_all('div', {"class": "d-inline-block col-9 mb-1"})
    for div in div_tags:
        reponame = div.get_text().strip().replace(" ", "")

        github_repo["Repo"]=reponame
        github_repo["RepoURL"]="https://github.com/"+reponame

    div_tags = li.find_all('div', {"class": "py-1"})
    for div in div_tags:
        description = div.get_text().strip()
        github_repo["Description"]=description

    div_tags = li.find_all('div', {"class": "f6 text-gray mt-2"})

print(soup.find_all('p')[2])
print(soup.find_all('p')[2].get_text())
print(soup.find_all('p'))
    for div in div_tags:
        a_tags = div.find_all('a', {"class": "muted-link d-inline-block mr-3"})
        github_repo['StargazersURL']=baseURL+a_tags[0]['href']
        github_repo['StargazersCOunt']=a_tags[0].get_text().strip()
        github_repo['Forks']=baseURL+a_tags[1]['href']
        github_repo['ForksCount']=a_tags[1].get_text().strip()

        span = div.find_all('span', {"class": "d-inline-block float-sm-right"})
        for s in span:
            github_repo['StarsThisMonth']=s.get_text().strip()

print(github_repo)
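One thing worth noting about the loop above: github_repo is a single dict that is overwritten on every li, so the final print only shows the last trending repository. A tiny self-contained illustration of that reuse-versus-append difference (the names here are made up, not taken from the scraper):

# reusing one dict keeps only the last item
items = {}
for name in ['alpha', 'beta', 'gamma']:
    items['Repo'] = name
print(items)                      # {'Repo': 'gamma'} -- earlier names are lost

# appending a fresh dict per iteration keeps them all
collected = []
for name in ['alpha', 'beta', 'gamma']:
    collected.append({'Repo': name})
print(collected)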
17 changes: 17 additions & 0 deletions Python Web Scraping/03.py
@@ -0,0 +1,17 @@
import json, requests

subreddit = 'powershell'

r = requests.get(
    'http://www.reddit.com/r/{}.json'.format(subreddit),
    headers={'user-agent': 'Mozilla/5.0'}
)

# view structure of an individual post
print(r.json()['data']['children'])

#for post in r.json()['data']['children']:
# print('Title:',post['data']['title'])
# print('URL:', post['data']['url'])
# print('ups',post['data']['ups'])
# print('Num of comments',post['data']['num_comments'])#, post['data']['selftext'])
23 changes: 23 additions & 0 deletions Python Web Scraping/04_PRAW.py
@@ -0,0 +1,23 @@
# pip install praw
import praw

reddit = praw.Reddit(
    client_id='_MupVEzusqnB4g',
    client_secret='OE88oDY9ZJZYlXUYLLM--A-R3gs',
    password='Durg@v@ti@123',
    user_agent='PrawTut',
    username='Prateeksingh1590')

subreddit = reddit.subreddit('powershell')

hot_python = subreddit.hot(limit=5)

for submission in hot_python:
    if not submission.stickied:
        #print(dir(submission))
        print(
            'Title: {}, fullname: {}, ups: {}, downs: {}, flair: {} url: {} views: {} comments: {} timeUTC: {}'.
            format(submission.title, submission.fullname, submission.ups,
                   submission.downs, submission.link_flair_text, submission.url,
                   submission.view_count, submission.num_comments,
                   submission.created_utc))
17 changes: 17 additions & 0 deletions Python Web Scraping/AutoPost.py
@@ -0,0 +1,17 @@
# pip install python-wordpress-xmlrpc
from wordpress_xmlrpc import Client, WordPressPost, WordPressPage
from wordpress_xmlrpc.methods.posts import GetPosts, NewPost
from wordpress_xmlrpc.methods.users import GetUserInfo
from wordpress_xmlrpc.methods import posts


wp = Client('http://ridicurious.com/xmlrpc.php', '[email protected]', 'Durg@v@ti@123')
#posts = wp.call(GetPosts())
#posts = Client.call(posts.GetPosts())
pages = Client.call(posts.GetPosts({'post_type': 'page'}, results_class=WordPressPage))

print(dir(pages))

#print(dir(posts[0]))
#for post in posts:
# print('title: {}, status: {}, id: {}, link: {}'.format(post.title, post.post_status, post.id, post.link))
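As written, the page fetch goes through the Client class rather than the wp instance created a few lines earlier, so it would fail because call needs an instance. A minimal sketch of the usual python-wordpress-xmlrpc pattern, with placeholder endpoint and credentials; the printed attribute names are assumptions rather than a verbatim fix of this file:

from wordpress_xmlrpc import Client, WordPressPage
from wordpress_xmlrpc.methods import posts

# placeholder endpoint and credentials
wp = Client('http://example.com/xmlrpc.php', 'username', 'password')

# calls go through the Client instance; results_class maps rows to WordPressPage objects
pages = wp.call(posts.GetPosts({'post_type': 'page'}, results_class=WordPressPage))

for page in pages:
    print('title: {}, id: {}, link: {}'.format(page.title, page.id, page.link))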
@@ -0,0 +1,6 @@
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 2
}
