Commit 18ff5b2 (initial commit, 0 parents): 40 changed files with 2,641 additions and 0 deletions.
# -*- coding: utf-8 -*-
"""multi_face.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/165AXrMIl0tEIwez9nDZSTL9QIg7mo5aH
"""
# install the face_recognition package first (shell command in the notebook;
# it must run before face_recognition can be imported on a fresh runtime)
!pip install face_recognition

# import the necessary packages
from imutils import paths
import face_recognition
import pickle
import cv2
import os
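# NOTE (not in the original notebook): in Colab, Google Drive has to be
# mounted before the chdir below can succeed; a minimal sketch using the
# standard Colab mount point, from which the relative 'drive/My Drive/...'
# path resolves:
from google.colab import drive
drive.mount('/content/drive')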
# change into the dataset directory on Google Drive
os.chdir('drive/My Drive/dataset2')
# grab the paths to the input images in our dataset
print("[INFO] quantifying faces...")
imagePaths = list(paths.list_images('data'))
# initialize the list of known encodings and known names
knownEncodings = []
knownNames = []

# a dict (not a list) so it can be indexed by the "detection_method" key
args = {}
args["detection_method"] = "cnn"
# loop over the image paths
for (i, imagePath) in enumerate(imagePaths):
    # extract the person name from the image path
    print("[INFO] processing image {}/{}".format(i + 1, len(imagePaths)))
    name = imagePath.split(os.path.sep)[-2]
    # load the input image and convert it from BGR (OpenCV ordering)
    # to dlib ordering (RGB)
    image = cv2.imread(imagePath)
    rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # detect the (x, y)-coordinates of the bounding boxes
    # corresponding to each face in the input image
    boxes = face_recognition.face_locations(rgb, model=args["detection_method"])
    # compute the facial embedding for each detected face
    encodings = face_recognition.face_encodings(rgb, boxes)
    # loop over the encodings
    for encoding in encodings:
        # add each encoding + name to our set of known names and encodings
        knownEncodings.append(encoding)
        knownNames.append(name)
# dump the facial encodings + names to disk
print("[INFO] serializing encodings...")
data = {"encodings": knownEncodings, "names": knownNames}
with open("encodings1.pickle", "wb") as f:
    f.write(pickle.dumps(data))

# list the working directory to confirm the pickle was written
!ls
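# Optional sanity check (a sketch, not in the original notebook): reload the
# pickle and confirm exactly one name was stored per encoding.
with open("encodings1.pickle", "rb") as fh:
    check = pickle.loads(fh.read())
assert len(check["encodings"]) == len(check["names"])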
"""## **Training Completed Now Testing** | ||
--- | ||
""" | ||
|
||
os.chdir('drive/My Drive/dataset2') # This is where my dataset is placed | ||
|
||
ls | ||
|
||
# import the necessary packages
import face_recognition
import pickle
import cv2

# input settings (renamed slightly so the encodings file path is not
# shadowed by the face encodings computed below)
encodingsFile = "encodings1.pickle"
imageFile = "photo2.jpg"
detection_method = "cnn"
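# Aside (not in the original notebook): face_recognition.face_locations also
# accepts model="hog", a faster CPU-friendly detector; "cnn" is more accurate
# but benefits from a GPU runtime.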
# load the known faces and embeddings
print("[INFO] loading encodings...")
data = pickle.loads(open(encodingsFile, "rb").read())
# load the input image and convert it from BGR to RGB
image = cv2.imread(imageFile)
rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# detect the (x, y)-coordinates of the bounding boxes corresponding
# to each face in the input image, then compute the facial embeddings
# for each face
print("[INFO] recognizing faces...")
boxes = face_recognition.face_locations(rgb, model=detection_method)
encodings = face_recognition.face_encodings(rgb, boxes)
# initialize the list of names for each face detected
names = []
class Actor:
    # a small profile for each known person
    def __init__(self, name, age, gender, born):
        self.name = name
        self.age = "(" + str(age) + ")"
        self.gender = gender
        self.born = born

downey = Actor("Downey", 55, "MALE", "New York")
chris = Actor("Chris", 36, "MALE", "Melbourne")
jeremy = Actor("Jeremy", 49, "MALE", "California")
scarlett = Actor("Scarlett", 35, "FEMALE", "New York")

# quick check: eval() maps the string "downey" to the Actor variable of the
# same name (this only works because the dataset directory names match these
# variable names)
eval("downey").name
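# A safer alternative to eval() (a sketch, not in the original notebook):
# keep the profiles in a dict keyed by the same strings used as dataset
# directory names, so an unknown name fails gracefully instead of raising.
actors = {"downey": downey, "chris": chris, "jeremy": jeremy, "scarlett": scarlett}
assert actors["downey"].name == "Downey"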
# loop over the facial embeddings
for encoding in encodings:
    # attempt to match each face in the input image to our known
    # encodings
    matches = face_recognition.compare_faces(data["encodings"], encoding)
    name = "Unknown"
    # check to see if we have found a match
    if True in matches:
        # find the indexes of all matched faces, then initialize a
        # dictionary to count the total number of times each face
        # was matched
        matchedIdxs = [i for (i, b) in enumerate(matches) if b]
        counts = {}
        # loop over the matched indexes and maintain a count for
        # each recognized face
        for i in matchedIdxs:
            name = data["names"][i]
            counts[name] = counts.get(name, 0) + 1
        # determine the recognized face with the largest number of
        # votes (note: in the event of an unlikely tie, Python will
        # select the first entry in the dictionary)
        name = max(counts, key=counts.get)

    # update the list of names
    names.append(name)
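# Worked example of the voting step (illustrative numbers only): if 12 of the
# stored "downey" encodings matched this face and 2 of the "chris" encodings
# did, counts == {"downey": 12, "chris": 2}, and max(counts, key=counts.get)
# returns "downey". compare_faces() also accepts a tolerance parameter
# (default 0.6); lower values make matching stricter.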
# loop over the recognized faces
for ((top, right, bottom, left), name) in zip(boxes, names):
    # draw the predicted face name on the image; skip unrecognized faces,
    # since there is no Actor profile to look up for "Unknown"
    if name != "Unknown":
        name = eval(name)  # look up the Actor whose variable name matches
        cv2.rectangle(image, (left, top), (right, bottom), (0, 255, 0), 2)
        y = top - 15 if top - 15 > 15 else top + 15
        cv2.putText(image, name.name, (left, y), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 191, 0), 2)
        cv2.putText(image, name.age, (left + 90, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
        # cv2.putText(image, name.gender, (left + 90, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 191, 0), 2)
        # cv2.putText(image, name.born, (left + 80, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 191, 0), 2)
# Commented out IPython magic to ensure Python compatibility.
# %pylab inline
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
# img = mpimg.imread('your_image.png')
# matplotlib expects RGB channel order, so convert the annotated BGR image
imgplot = plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
plt.show()

# cv2_imshow is Colab's replacement for cv2.imshow and takes BGR directly
from google.colab.patches import cv2_imshow

cv2_imshow(image)
"""##Database Can be seen below""" | ||
|
||
os.chdir('data') | ||
|
||
ls | ||
|
||
#Four Directory with People Names |