diff --git a/face_builder/Glasses.png b/face_builder/Glasses.png
new file mode 100644
index 0000000000000000000000000000000000000000..0efa9ba65c75856bb2fadf7e08a07aef1708b56b
Binary files /dev/null and b/face_builder/Glasses.png differ
diff --git a/face_builder/background.png b/face_builder/background.png
new file mode 100644
index 0000000000000000000000000000000000000000..a42e9bf8c23c1be5205323b3123a8b59215798ff
Binary files /dev/null and b/face_builder/background.png differ
diff --git a/face_builder/eye_brow_angry.png b/face_builder/eye_brow_angry.png
new file mode 100644
index 0000000000000000000000000000000000000000..72e12b3a46e0955d3a5e47df6a388833b311eb23
Binary files /dev/null and b/face_builder/eye_brow_angry.png differ
diff --git a/face_builder/eye_brow_angry.xcf b/face_builder/eye_brow_angry.xcf
new file mode 100644
index 0000000000000000000000000000000000000000..b1c2b098f5cd4ca12a068b2e14d0dba51373a91a
Binary files /dev/null and b/face_builder/eye_brow_angry.xcf differ
diff --git a/face_builder/eye_brow_confused.png b/face_builder/eye_brow_confused.png
new file mode 100644
index 0000000000000000000000000000000000000000..a7906857645aebf2fa4fc92dbe7cddf13f2171a2
Binary files /dev/null and b/face_builder/eye_brow_confused.png differ
diff --git a/face_builder/eye_brow_happy.png b/face_builder/eye_brow_happy.png
new file mode 100644
index 0000000000000000000000000000000000000000..d208b43fe81ae53519ecbc08810a694a6a16612d
Binary files /dev/null and b/face_builder/eye_brow_happy.png differ
diff --git a/face_builder/eye_brow_happy.xcf b/face_builder/eye_brow_happy.xcf
new file mode 100644
index 0000000000000000000000000000000000000000..e6d78f5b9ceb7e1cee0e9fdf205a83620e7453fd
Binary files /dev/null and b/face_builder/eye_brow_happy.xcf differ
diff --git a/face_builder/eye_brow_neutral.png b/face_builder/eye_brow_neutral.png
new file mode 100644
index 0000000000000000000000000000000000000000..3f451558b59811c1c4b0fc38405c27e9bce35d5b
Binary files /dev/null and b/face_builder/eye_brow_neutral.png differ
diff --git a/face_builder/eye_brow_sad.png b/face_builder/eye_brow_sad.png
new file mode 100644
index 0000000000000000000000000000000000000000..450c0095215e439d795882ff7e7786642c06ce8e
Binary files /dev/null and b/face_builder/eye_brow_sad.png differ
diff --git a/face_builder/eye_brow_sad.xcf b/face_builder/eye_brow_sad.xcf
new file mode 100644
index 0000000000000000000000000000000000000000..071ef7e5bcbfb3ee704c1a4d876507fe51f03cc4
Binary files /dev/null and b/face_builder/eye_brow_sad.xcf differ
diff --git a/face_builder/eyelid.png b/face_builder/eyelid.png
new file mode 100644
index 0000000000000000000000000000000000000000..8ab4016078e3e4193fd852e085c5256d1ad932ef
Binary files /dev/null and b/face_builder/eyelid.png differ
diff --git a/face_builder/eyelid.xcf b/face_builder/eyelid.xcf
new file mode 100644
index 0000000000000000000000000000000000000000..1c2f1d22cb842dda7edfcabe1032889289dae164
Binary files /dev/null and b/face_builder/eyelid.xcf differ
diff --git a/face_builder/eyes.png b/face_builder/eyes.png
new file mode 100644
index 0000000000000000000000000000000000000000..891ab92a7f0e5414b843f126b9e04cbca86afc49
Binary files /dev/null and b/face_builder/eyes.png differ
diff --git a/face_builder/eyes_shapexcf.xcf b/face_builder/eyes_shapexcf.xcf
new file mode 100644
index 0000000000000000000000000000000000000000..7730d1e9b75278802591ec43b827cc1c39caae55
Binary files /dev/null and b/face_builder/eyes_shapexcf.xcf differ
diff --git a/face_builder/fichero.PDF b/face_builder/fichero.PDF
new file mode 100644
index 0000000000000000000000000000000000000000..308ce8a74ee508aef931a1ee76dd0cd5b4d141f9
Binary files /dev/null and b/face_builder/fichero.PDF differ
diff --git a/face_builder/pupil.png b/face_builder/pupil.png
new file mode 100644
index 0000000000000000000000000000000000000000..5ba39f7e580ae67e5d2a0094796dc26cbddb0ffb
Binary files /dev/null and b/face_builder/pupil.png differ
diff --git a/face_builder/pupil.xcf b/face_builder/pupil.xcf
new file mode 100644
index 0000000000000000000000000000000000000000..631b7ea10bc8f08b2396cf72112fd94e1e5c6267
Binary files /dev/null and b/face_builder/pupil.xcf differ
diff --git a/face_builder/skin.png b/face_builder/skin.png
new file mode 100644
index 0000000000000000000000000000000000000000..5ab9bb8fbc69ef147189f9e32fe8c1da37cdad74
Binary files /dev/null and b/face_builder/skin.png differ
diff --git a/face_builder/skin.xcf b/face_builder/skin.xcf
new file mode 100644
index 0000000000000000000000000000000000000000..5637e932634ae4294671b46247d825f5719c0aed
Binary files /dev/null and b/face_builder/skin.xcf differ
diff --git a/scripts/Eyebrow.py b/scripts/Eyebrow.py
index 274ff441bb9f6ffef7cc9d168d5dc6f310b6a6e0..0658103eca8e3de3956f5dab0dfc6ecac7448c83 100755
--- a/scripts/Eyebrow.py
+++ b/scripts/Eyebrow.py
@@ -1,8 +1,9 @@
 #!/usr/bin/env python
 
 """
-@Author: Bilgehan NAL
-This file has a class which defines the mouth of the baxter's face.
+@Author: Antonio Andriella
+This file has a class which defines the eyebrow of the Tiago's face.
+Credits to Bilgehan NAL
 """
 
 from PIL import Image
@@ -12,16 +13,14 @@ class Eyebrow:
 
     indexOfEyebrow = 0  # choosen element of the array
 
-    def __init__(self, initEyebrow):
+    def __init__(self, folder_path, initEyebrow):
         # This array keeps the diffirent shape of eyebrow
         self.eyebrows = [
-            Image.open("test/eye_brow_neutral.png"),
-            Image.open("test/eye_brow_happy.png"),
-            Image.open("test/eye_brow_sad.png"),
-            Image.open("test/eye_brow_confused.png"),
-            Image.open("test/eye_brow_angry.png")
-            #Image.open("data/"+self.robot_gender+"/eyebrow/baxter_eyebrow_3.png"),
-            #Image.open("data/"+self.robot_gender+"/eyebrow/baxter_eyebrow_4.png")
+            Image.open(folder_path+"eye_brow_neutral.png"),
+            Image.open(folder_path+"eye_brow_happy.png"),
+            Image.open(folder_path+"eye_brow_sad.png"),
+            Image.open(folder_path+"eye_brow_confused.png"),
+            Image.open(folder_path+"eye_brow_angry.png")
         ]
 
         self.indexOfEyebrow = initEyebrow
diff --git a/scripts/Eyelid.py b/scripts/Eyelid.py
index 3d8befe8a260dfb1036540c042dd424606921de5..6e994325367a294a38de686a570ead924984358c 100755
--- a/scripts/Eyelid.py
+++ b/scripts/Eyelid.py
@@ -1,8 +1,9 @@
 #!/usr/bin/env python
 
 '''
-@Author: Bilgehan NAL
-This file has class which defines the eyelid of the baxter
+@Author: Antonio Andriella
+This file has a class which defines the eyelid of the Tiago robot
+Credits to Bilgehan NAL
 '''
 
 from PIL import Image
@@ -10,8 +11,8 @@ import rospy
 
 class Eyelid:
 
-    def __init__(self):
-        self.eyelid = Image.open("test/eyelid.png") # eyelid image
+    def __init__(self, folder_path):
+        self.eyelid = Image.open(folder_path+"eyelid.png") # eyelid image
         self.position = 0 # y position, we don't need x position because of vertical movment.
 
     def moveCalculation(self, position, destinationPosition, totalTime, instantTime):
diff --git a/scripts/Face.py b/scripts/Face.py
index d1401094b2a4713ead704ae64a0cbddc47334b68..7b2c2602ddda633d9fed94af056144bb981ebdcf 100755
--- a/scripts/Face.py
+++ b/scripts/Face.py
@@ -1,47 +1,43 @@
 #!/usr/bin/env python
 
 '''
-@Author: Bilgehan NAL
-This file has a class which defines the face of baxter
+@Author: Antonio Andriella
+This file has a class which defines the face of Tiago
 Face class has other part of the face objects.
+Credits to Bilgehan NAL
 '''
 
 '''
-Baxter Face Descriptions:
+Tiago Face Descriptions:
 
 Skin, Mouth and Eyebrow has multiple shapes.
-Skin has 5 -> [0, 5]
-Mouth has 7 -> [0, 6]
-Eyebrow has 5 -> [0, 4]
+Skin has 1 -> [0] (So far we are not interested to change the skin color of the robot)
+Mouth has 7 -> [0, 6] (Not used in the current version)
+Eyebrow has 5 -> [0, 4] (neutral, happy, sad, confused, angry)
 
-Skin :: 1->5 : yellow skin -> red skin
-        -> 6 is sleeping skin
 Mouth ::
-    0 -> angry mouth
-    1 -> boring mouth
-    2 -> confused mouth
-    3 -> sad mouth
-    4 -> smiling tooth mouth
-    5 -> smiling mouth
-    6 -> smiling open mouth
+    0 -> neutral mouth
+    1 -> happy mouth
+    2 -> sad mouth
+    3 -> confused mouth
+    4 -> angry mouth
 Eyebrow ::
-    0 -> normal eyebrow
-    1 -> ( ): eyebrow
-    2 -> One eyebrow in the high
-    3 -> angry eyebrow
-    4 -> Kucuk Emrah Eyebrow
-
+    0 -> neutral eyebrow
+    1 -> happy eyebrow
+    2 -> sad eyebrow
+    3 -> confused eyebrow
+    4 -> angry eyebrow
+    
 Coordinate range for x: [-80, 80]
 Coordinate range for y: [-120, 120]
 
 '''
 
 from PIL import Image
-import Skin
-import Mouth
 import Eyebrow
 import Eye
 import Eyelid
+import Skin
 from numpy import array
 import timeit
 import cv2
@@ -51,20 +47,22 @@ import getpass
 import time
 import rospy
 
+path = os.path.abspath(__file__)
+dir_path = os.path.dirname(path)
+parent_dir_of_file = os.path.dirname(dir_path)
+print(parent_dir_of_file)
 class Face:
 
     def __init__(self):
 
-        # determine the path and set the default path place
-        os.chdir(r'/home/pal/cognitive_game_ws/src/robot_facial_expression/scripts'.format(getpass.getuser()))
-        ''' Parts of the face of baxter are defined.'''
-        self.backgroundImage = Image.open("test/background.png") # Background behind the eyes
+        ''' Parts of the face of Tiago are defined.'''
+        self.face_builder_path = parent_dir_of_file+"/face_builder/"
+        self.backgroundImage = Image.open(self.face_builder_path+"background.png") # Background behind the eyes
         # Face partions objects
-        self.eye = Eye.Eye("test/pupil.png")
-        self.skin = Skin.Skin(5) # range: [0, 5]
-        self.mouth = Mouth.Mouth(2) # range: [0, 6]
-        self.eyebrow = Eyebrow.Eyebrow(2) # range: [0, 3]
-        self.eyelid = Eyelid.Eyelid()
+        self.skin = Skin.Skin(self.face_builder_path, 0)
+        self.eye = Eye.Eye(self.face_builder_path)
+        self.eyebrow = Eyebrow.Eyebrow(self.face_builder_path, 4)
+        self.eyelid = Eyelid.Eyelid(self.face_builder_path)
         self.eyelid.setPosition(-330)
         self.eyesCoordinateX = self.eye.getPositionX()
         self.angleOfView = 0.25
@@ -82,23 +80,9 @@ class Face:
         image = array(faceImage)
         return image
 
-    # def buildFaceHappy(self):
-    #     self.eye = Eye.Eye("test/pupil_happy.png")
-    #     # Merging the layers
-    #     faceImage = self.backgroundImage.copy()
-    #     faceImage.paste(self.eye.getEyes(), (int(self.eye.getPositionX()), int(self.eye.getPositionY())), self.eye.getEyes())
-    #     faceImage.paste(self.eyelid.getEyelid(), (0, self.eyelid.getPosition()), self.eyelid.getEyelid())
-    #     faceImage.paste(self.skin.getSkin(), (0, 0), self.skin.getSkin())
-    #     #faceImage.paste(self.mouth.getMouth(), (0, 0), self.mouth.getMouth())
-    #     faceImage.paste(self.eyebrow.getEyebrow(), (0, 0), self.eyebrow.getEyebrow())
-    #     image = array(faceImage)
-    #     return image
     def show(self, publish):
         image = self.buildFace()
         publish(image)
-    #def show_happy(self, publish):
-    #     image = self.buildFaceHappy()
-    #     publish(image)
 
     # Reposition of the eyes of the baxter
     # This function provide with the eyes' simulation movement
@@ -215,24 +199,24 @@ class Face:
 
     def sleep(self, cv2, publish):
        self.winkMove(cv2, 0, 0.6, publish) # Eyelids are not seen.
-       self.skin.setSkin(5) # range: [0, 5]
+       self.skin.setSkin(0) # range: [0, 5]
        self.showEmotion(1, 1, cv2, publish)
 
     def wakeUp(self, cv2, publish):
         self.winkMove(cv2, -330, 0.8, publish) # Eyelids are not seen.
-        self.skin.setSkin(3) # range: [0, 5]
+        self.skin.setSkin(0) # range: [0, 5]
         self.showEmotion(5, 0, cv2, publish)
 
     def emotion_default(self, cv2, publish):
        self.winkMove(cv2, -330, 0.3, publish) # Eyelids are not seen.
-       self.skin.setSkin(2)
+       self.skin.setSkin(0)
        self.showEmotion(5, 0, cv2, publish)
     
     def emotion_happy(self, cv2, publish):
         mouthArray = [4, 6]
         eyeBrowArray = [0, 1]
         self.winkMove(cv2, -330, 0.3, publish) # Eyelids are not seen.
-        self.skin.setSkin(2)
+        self.skin.setSkin(0)
         mouthIndex = 4#random.choice(mouthArray)
         eyebrowIndex = 1#random.choice(eyeBrowArray)
         self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
@@ -240,7 +224,7 @@ class Face:
     def emotion_neutral(self, cv2, publish):
         eyeBrowArray = [0, 1]
         self.winkMove(cv2, -330, 0.3, publish)  # Eyelids are not seen.
-        self.skin.setSkin(2)
+        self.skin.setSkin(0)
         mouthIndex = 5  # random.choice(mouthArray)
         eyebrowIndex = 0#random.choice(eyeBrowArray)
         self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
@@ -249,7 +233,7 @@ class Face:
         mouthArray = [0, 3]
         eyeBrowArray = [2, 3]
         self.winkMove(cv2, -330, 0.3, publish) # Eyelids are not seen.
-        self.skin.setSkin(4)
+        self.skin.setSkin(0)
         mouthIndex = random.choice(mouthArray)
         eyebrowIndex = 4#random.choice(eyeBrowArray)
         self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
@@ -258,7 +242,7 @@ class Face:
         mouthArray = [2]
         eyeBrowArray = [0, 1]
         self.winkMove(cv2, -330, 0.3, publish) # Eyelids are not seen.
-        self.skin.setSkin(3)
+        self.skin.setSkin(0)
         mouthIndex = random.choice(mouthArray)
         eyebrowIndex = 3
         self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
@@ -267,7 +251,7 @@ class Face:
         mouthArray = [1, 3]
         eyeBrowArray = [4]
         self.winkMove(cv2, -330, 0.3, publish) # Eyelids are not seen.
-        self.skin.setSkin(1)
+        self.skin.setSkin(0)
         mouthIndex = 1#random.choice(mouthArray)
         eyebrowIndex = 2#random.choice(eyeBrowArray)
         self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
@@ -285,7 +269,7 @@ class Face:
         mouthArray = [1]
         eyeBrowArray = [0, 2, 3]
         self.winkMove(cv2, -150, 0.3, publish) # Eyelids are in the middle of the eyes.
-        self.skin.setSkin(2)
+        self.skin.setSkin(0)
         mouthIndex = random.choice(mouthArray)
         eyebrowIndex = 2#random.choice(eyeBrowArray)
         self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
@@ -294,7 +278,7 @@ class Face:
         mouthArray = [4, 6]
         eyeBrowArray = [2, 3]
         self.winkMove(cv2, -330, 0.3, publish) # Eyelids are not seen.
-        self.skin.setSkin(3)
+        self.skin.setSkin(0)
         mouthIndex = 4#random.choice(mouthArray)
         eyebrowIndex = random.choice(eyeBrowArray)
         self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
diff --git a/scripts/Voice_Recogniser.py b/scripts/Voice_Recogniser.py
deleted file mode 100755
index a68e95a38aab96a4b90471d3d7c1d8208f74e737..0000000000000000000000000000000000000000
--- a/scripts/Voice_Recogniser.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python
-
-'''
-@Author: Bilgehan NAL
-Voice Recogniser is a class to help listening the speech and converting to string
-'''
-
-import speech_recognition as sr
-import io
-import sys
-
-class Voice_Recogniser:
-
-    # These variables are sample language code
-    TURKEY = 'tr-TR'
-    US = 'en-US'
-    UK = 'en-GB'
-    FRANCE = 'fr-FR'
-    SPAIN = 'es-ES'
-    GERMAN = 'de-DE'
-    ITALY = 'it-IT'
-    RUSSIA = 'ru-RU'
-
-    def __init__(self):
-        # Dictionary, all comands are stored in a dictionary
-        self.commands = {'---': '---'}
-
-    def __init__(self, txt_path):
-        self.commands = {'---': '---'}
-        self.updateDictionary(txt_path)
-
-    ''' 
-    Dictionary is used for to decide the action
-    if the string(speech) includes any key value, it returns command
-    txt file should be like below
-
-    hello.be_happy
-    wellcome.be_happy
-
-    if we process the string given from listen function.
-    The string includes hello or wellcome then, it returns be_happy
-
-    '''
-    def updateDictionary(self, path):
-        #Read data line by line to a list from the txt file.
-        with io.open(path, 'r', encoding='utf-8') as file:
-            my_list = file.readlines()
-        #Seperating waited speech and commands. These two things are seperated by a character of dot(.)
-        for row in my_list:
-            command = row.encode('utf-8').split(".")
-            if len(command) == 2:
-                self.commands[command[0]] = command[1]
-                print ("Key: {}, Value: {}".format(command[0], command[1]))
-
-    # listen_language is a voice recognition function, language is given through a parameter.
-    def listen_language(self, language):
-        string = "-"
-        r = sr.Recognizer()
-        while string == "-":
-            with sr.Microphone() as source:   
-                print("Baxter is listening you...")              
-                audio = r.listen(source)    
-                print("wait...")  
-            try:
-                string = r.recognize_google(audio, language=language) #Recognize speech
-                print("Baxter thinks you said ->  " + string)
-            except sr.UnknownValueError:
-                string = "-"
-            except sr.RequestError as e:
-                print("Could not request results from Google Speech Recognition service; {0}".format(e))
-        print("Done...")
-        return string
-
-    # Default listen function, it recognises US English
-    def listen(self):
-        string = "-"
-        r = sr.Recognizer()
-        while string == "-":
-            with sr.Microphone() as source:   
-                print("Baxter is listening you...")              
-                audio = r.listen(source)    
-                print("wait...")  
-            try:
-                string = r.recognize_google(audio, language=US) #Recognize speech
-                print("Baxter thinks you said ->  " + string)
-            except sr.UnknownValueError:
-                string = "-"
-            except sr.RequestError as e:
-                print("Could not request results from Google Speech Recognition service; {0}".format(e))
-        print("Done...")
-        return string
-
-    def process(self, string):
-        enc = sys.getdefaultencoding()
-        result = "Speech could not be processed" #Default message
-        string = string.lower()#All cases are converted to lower case
-        # Search the commands in dictionary
-        for key in self.commands.keys():
-            # if the key is substring of string -> key is our commands.
-            if key in string:
-                result = self.commands[key]
-                break
-        return result.rstrip().lower().encode('utf_8')
\ No newline at end of file
diff --git a/scripts/control_physical.py b/scripts/control_physical.py
deleted file mode 100755
index c3153c8bb48c71bde9124f92a039179f7bd81980..0000000000000000000000000000000000000000
--- a/scripts/control_physical.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python
-
-'''
-@Author: Bilgehan NAL
-This file is a publisher node. 
-This node listens the data came from the buttons of baxter.
-According to this data user can control the baxter's face.
-
-'''
-
-import rospy
-import baxter_interface
-from std_msgs.msg import String
-import time
-
-# commands are the emotions as a list
-commands = ['default',
-'happy',
-'sad',
-'confused',
-'angry',
-'panic',
-'crafty',
-'bored']
-
-'''
-getEmotion function returns the key of the emotions according to
-the situation of the wheel.
-'''
-def getEmotion(value):
-    numberOfEmotions = len(commands)
-    rangeOfEmotions = 256.0/float(numberOfEmotions)
-    return int(float(value)/rangeOfEmotions)
-
-def main():
-    rospy.init_node("control", anonymous=True)
-    pub = rospy.Publisher('display_chatter', String, queue_size=40) #Display chatter publisher defined.
-
-    # This variables fot helping that which action will be chosed
-    indexOfEmotion = 0
-    isWakeUp = False
-    isFollow = False
-    emotion = 0
-    isEnable = False
-
-    '''
-    Navigators are declared.
-    -> Navigators on the arms are used for changing the emotions. 
-    Okay buttons of the arm navigators are used for enable or disable robot
-    -> Navigators on the torses are unsed for the arm following action.
-    Also they are used for two actions. (Sleep and wake up)
-    '''
-    navLeftArm = baxter_interface.Navigator('left')
-    navRightArm = baxter_interface.Navigator('right')
-    navLeftTorso = baxter_interface.Navigator('torso_left')
-    navRightTorso = baxter_interface.Navigator('torso_right')
-
-    print "Controller is enable..."
-
-    while not rospy.is_shutdown():
-        # Arm navigators okay button to enable and disable
-        if navLeftTorso._state.buttons[0] or navRightTorso._state.buttons[0]:
-            if not isEnable:
-                pub.publish("enable")
-                isEnable = True
-                print "enable"
-            else:
-                pub.publish("disable")
-                isEnable = False
-                print "disable"
-        #Left arm up button to wake up
-        elif navLeftArm._state.buttons[1]:
-            pub.publish("wake_up")
-            isWakeUp = True
-            print "wake_up"
-        #Left arm down button to sleep
-        elif navLeftArm._state.buttons[2]:
-            pub.publish("sleep")
-            isWakeUp = False
-            print "sleep"
-        #Right arm buttons to follow arms
-        elif navRightArm._state.buttons[1]:
-            if isFollow:
-                pub.publish("arm_follow_off")
-                isFollow = False
-                print "arm_follow_off"
-            else:
-                pub.publish("dynamic_left_arm_follow_on")
-                isFollow = True
-                print "dynamic_left_arm_follow_on"
-
-        elif navRightArm._state.buttons[2]:
-            if isFollow:
-                pub.publish("arm_follow_off")
-                isFollow = False
-                print "arm_follow_off"
-            else:
-                pub.publish("dynamic_right_arm_follow_on")
-                isFollow = True
-                print "dynamic_right_arm_follow_on"
-        else:
-            # Wheel Control
-            currentEmotion = getEmotion(navLeftArm._state.wheel)
-            if not emotion == currentEmotion and isWakeUp:
-                pub.publish(commands[currentEmotion])
-                emotion = currentEmotion
-                print commands[currentEmotion]
-                print currentEmotion
-            continue
-        
-        print "Wait for 0.3 secs "
-        time.sleep(0.3)
-        print "Okay:"
-
-if __name__ == '__main__':
-    try:
-        main()
-    except rospy.ROSInterruptException:
-        pass
\ No newline at end of file
diff --git a/scripts/control_server.py b/scripts/control_server.py
deleted file mode 100755
index 25bb83cbc4e3626ea630c56cf254e708a85870a2..0000000000000000000000000000000000000000
--- a/scripts/control_server.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-'''
-@Author: Bilgehan NAL
-This python file is a socket server.
-This server listens the messages sent to determined port
-then, publish the given data to determined topic
-'''
-
-import socket
-import rospy
-from std_msgs.msg import String
-import re
-import fcntl
-import struct
-import sys
-import getpass
-import io
-
-commands = {'---': '---'}
-
-
-def main():
-    port = 8080 # Default Port 
-    topic = 'display_chatter' # Default Topic
-    topic, port = externelArgumant(topic, port) # if there is any external topic or port, use them
-
-    rospy.init_node('voice_publisher', anonymous=True)
-    updateDictionary("/home/{}/ros_ws/src/baxter_face/scripts/data/voice_command.txt".format(getpass.getuser()))
-    pub = rospy.Publisher(topic, String, queue_size=40) # Display chatter publisher defined.
-    # Socket variables
-    TCP_IP = ""
-    TCP_PORT = port
-    BUFFER_SIZE = 256  # Normally 1024, but we want fast response
-    # Communication variables display
-    print "ip: {}".format(get_ip_address('eth0'))
-    print "port: {}".format(TCP_PORT)
-    print "topic: {}".format(topic)
-    
-    
-
-    # Socket Creation
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-    s.bind((TCP_IP, TCP_PORT))
-    s.listen(1)
-    conn, addr = s.accept()
-    socket.setdefaulttimeout(1)
-    print 'Connection address:', addr
-    
-    # Listen message
-    while 1:
-        try:
-            data = conn.recv(BUFFER_SIZE) # message given from client stop the server
-            data = data.decode('latin-1').encode('utf-8')
-            if not data or "exit" in data:  # To stop the server
-                print "Program is closing"
-                break
-            data = data.replace('\0', '')
-            for index in data:
-                print "String: {}".format(ord(index))
-            print u"received data: {}".format(data)
-
-            conn.send("data: {}\n".format(data))  # echo
-            if(isSpeech(data)):
-                pub.publish(process(getSpeechMessage(data)))
-            else:
-                pub.publish(data) # Publish to topic
-        except socket.timeout:
-            print "No data is detected"
-    conn.close()
-
-def isSpeech(msg):
-    msgs = msg.split("*") #this array keeps all variables
-    if len(msgs) > 1:
-        return True
-    else:
-        return False 
-
-def getSpeechMessage(msg):
-    msgs = msg.split("*")
-    if len(msgs) > 1:
-        return msgs[1]
-    else:
-        return  msgs[0] 
-
-
-
-# IP address taken
-def get_ip_address(ifname):
-    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-    return socket.inet_ntoa(fcntl.ioctl(
-        s.fileno(),
-        0x8915,  # SIOCGIFADDR
-        struct.pack('256s', ifname[:15])
-    )[20:24])
-
-# Externel topic or port information control:
-def externelArgumant(topic, port):
-    args = sys.argv
-    length = len(args)
-    '''
-    1 argumant -> nothing
-    2 argumants -> topic
-    3 argumants -> topic and port
-    '''
-    if length == 1:
-        return topic, port
-    elif length == 2:
-        newPort = int(args[1])
-        return topic, newPort
-    elif length == 3:
-        newTopic = args[2]
-        newPort = int(args[1])
-        return newTopic, newPort
-    return topic, port
-
-def updateDictionary(path):
-        #Read data line by line to a list from the txt file.
-        with io.open(path, 'r', encoding='utf-8') as file:
-            my_list = file.readlines()
-        #Seperating waited speech and commands. These two things are seperated by a character of dot(.)
-        for row in my_list:
-            command = row.encode('utf-8').split(".")
-            if len(command) == 2:
-                commands[command[0]] = command[1]
-                print ("Key: {}, Value: {}".format(command[0], command[1]))
-
-def process(string):
-    enc = sys.getdefaultencoding()
-    result = "Speech could not be processed" #Default message
-    string = string.lower()#All cases are converted to lower case
-    # Search the commands in dictionary
-    for key in commands.keys():
-        # if the key is substring of string -> key is our commands.
-        if key in string:
-            result = commands[key]
-            break
-    return result.rstrip().lower().encode('utf_8')
-
-if __name__ == '__main__':
-    try:
-        main()
-    except rospy.ROSInterruptException:
-        pass
-
diff --git a/scripts/control_voice.py b/scripts/control_voice.py
deleted file mode 100755
index 7b59ff5c1d2bb10e79c852b1f6227322455c4d91..0000000000000000000000000000000000000000
--- a/scripts/control_voice.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-
-'''
-@Author: Bilgehan NAL
-This file is a publisher node. 
-This node listens the people and provides with to control with voice.
-Google speech recognition api is used to reconite the voice.
-'''
-
-import sys
-import os
-import getpass
-import termios
-import contextlib
-import rospy
-from std_msgs.msg import String
-import Voice_Recogniser
-
-# This function is used for key listener.
-@contextlib.contextmanager
-def raw_mode(file):
-    old_attrs = termios.tcgetattr(file.fileno())
-    new_attrs = old_attrs[:]
-    new_attrs[3] = new_attrs[3] & ~(termios.ECHO | termios.ICANON)
-    try:
-        termios.tcsetattr(file.fileno(), termios.TCSADRAIN, new_attrs)
-        yield
-    finally:
-        termios.tcsetattr(file.fileno(), termios.TCSADRAIN, old_attrs)
-
-def main():
-    rospy.init_node('voice_publisher', anonymous=True)
-    pub = rospy.Publisher('display_chatter', String, queue_size=40) #Display chatter publisher defined.
-    # Recogniser initilazition: parameter is path of txt file of commands
-    recogniser = Voice_Recogniser.Voice_Recogniser("/home/{}/ros_ws/src/baxter_face/scripts/data/voice_command.txt".format(getpass.getuser()))
-    print 'exit with ^C or ESC'
-    # Detecting the push any key on keyboard
-    with raw_mode(sys.stdin):
-        try:
-            while True:
-                ch = sys.stdin.read(1)
-                if not ch or ch == chr(4) or ord(ch) == 27: # Closing program control
-                    break
-                if ord(ch) == 32: #space key detection
-                    print("Baxter is ready to listen you")
-                    command = recogniser.process(recogniser.listen_language(recogniser.TURKEY)) #command
-                    print ("applicated command: {}".format(command))
-                    pub.publish(command) #publishing the command
-        except (KeyboardInterrupt, EOFError):
-            pass
-
-if __name__ == '__main__':
-    try:
-        main()
-    except rospy.ROSInterruptException:
-        pass
\ No newline at end of file
diff --git a/scripts/head_wobbler.py b/scripts/head_wobbler.py
deleted file mode 100755
index ee3d7f2b10658bcbd0557fef55934881a409ad35..0000000000000000000000000000000000000000
--- a/scripts/head_wobbler.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-
-"""
-@Author: Bilgehan NAL
-This Wobbler class is for the moving of the had joint.
-"""
-
-
-import argparse
-import rospy
-import baxter_interface
-from baxter_interface import CHECK_VERSION
-
-
-class Wobbler(object):
-
-    def __init__(self):
-        """
-        'Wobbles' the head
-        """
-        self._done = False
-        self._head = baxter_interface.Head()
-        self.tolerance = baxter_interface.HEAD_PAN_ANGLE_TOLERANCE
-        self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
-        print "Wobbler is initilized"
-
-    """ enable robot """
-
-    def enable(self):
-        # verify robot is enabled
-        print("Getting robot state... ")
-        self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
-        self._init_state = self._rs.state().enabled
-        if not self._rs.state().enabled:
-            print("Enabling robot... ")
-            self._rs.enable()
-    
-    def disable(self):
-        """
-        Exits example cleanly by moving head to neutral position and
-        maintaining start state
-        """
-       
-        print("\nExiting example...")
-        if self._done:
-            self.set_neutral()
-        if self._rs.state().enabled:
-            print("Disabling robot...")
-            self._rs.disable()
-
-    def set_neutral(self):
-        # Sets the head back into a neutral pose
-        self._head.set_pan(0.0)
-
-    def wobbleSlow(self, angle) :
-        # Sets the head place to a gaven position with a given speed
-        print baxter_interface.HEAD_PAN_ANGLE_TOLERANCE
-        if angle > 1.5 :
-            angle = 1.5
-        elif angle < -1.5 :
-            angle = -1.5
-       
-        currentAngle = currentAngle = self.getPosition()
-        sign = 0
-        
-        if angle > currentAngle:
-            sign = 1
-        else:
-            sign = -1
-
-        control_rate = rospy.Rate(100)
-        
-        while not abs(angle - currentAngle) <= 0.1:
-            currentAngle = currentAngle + sign*0.07
-            #print "calculated angle: {}".format(currentAngle)
-            self._head.set_pan(currentAngle, speed=0.3, timeout=0)
-            #self._head.set_pan(currentAngle)
-            control_rate.sleep()
-            currentAngle = self.getPosition()
-           # print "current: {}, sign: {}".format(currentAngle, sign)     
-        
-        
-
-    def wobble(self, angle) :
-        # Sets the head place to a gaven position
-        if angle > 1.5 :
-            angle = 1.5
-        elif angle < -1.5 :
-            angle = -1.5
-        if self._rs.state().enabled:
-            print "Wobbling is apllicating"
-            self._head.set_pan(angle)
-
-    def getPosition(self):
-        # get the angle of the baxter's head's in the current time.
-        return self._head.pan()
-
-    def isEnable(self):
-        self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
-        return self._rs.state().enabled
-        
diff --git a/scripts/screen_listener.py b/scripts/screen_listener.py
index e15ae399825db3b6834067bf077d0ff3105c35a3..4c585077a3863bf0d8a2c3304a14ce33d847f095 100755
--- a/scripts/screen_listener.py
+++ b/scripts/screen_listener.py
@@ -12,24 +12,11 @@ Emotions:
     -> "confused"
     -> "panic"
     -> "bored"
-    -> "crafty"
 Actions:
     -> "look_<x>_<y>"
     -> "look_<x>_<y>_<time>"
-    -> "skin_<number>"
-    -> "human_follow_on"
-    -> "human_follow_off"
-    -> "arm_follow_off"
-    -> "left_arm_follow_on"
-    -> "right_arm_follow_on"
-    -> "dynamic_look_<x>_<y>"
-    -> "dynamic_human_follow_on"
     -> "wake_up"
     -> "sleep"
-Wobbling:
-    -> "enable"
-    -> "disable" 
-    -> "wobble_<angle>" angle should be between[-1.5, 1.5]
 Other:
     -> "exit"
     -> "wake_up"
@@ -57,28 +44,12 @@ import math
 wobbler = None
 
 face = Face.Face()
-humanFollowControl = False # if this variable will be true, baxter follows the humans 
-dynamicControl = False
-armFollowControl = False # if this variable will be true, baxter follows its determined arm
-isItLeftArm = True
 defaultMsg = "This message is for controlling the msgs (is it the same with previous one?)"
 
-# helpers: handle the noise while following the arm
-xAxisLeft = 0
-yAxisLeft = 0
-xAxisRight = 0
-yAxisRight = 0
-c = 0.75 #This variable keeps the distance between origin and head
 
 # helpers: handle the noise while following the human
-elementOfHandleList = 0
-coor = 0
+
 isSystemRun = True
-oldCoor = 0
-xOld = 0
-pKOld = 1
-sizeOfHandleList = 35
-handleList = [0]*35
 
 def isInAvailablePercentage(minimum, current, percentage):
     rangeOfPercentage = percentage / 100.0
@@ -90,7 +61,7 @@ def isInAvailablePercentage(minimum, current, percentage):
 # publish image is a function which displays the image given with parameter. Image Type: Numpy array
 def publish_image(img):
     msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding="rgba8")
-    pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)
+    pub = rospy.Publisher('/robot/expression_diplay', Image, latch=True, queue_size=1)
     pub.publish(msg)
 
 # Statistical Functions
@@ -180,6 +151,10 @@ def callback_Command(data):
             face.emotion_default(cv2, publish_image)
             print "Emotion neutral is applicated"
 
+        elif msgs[0] == "wakeup":
+            face.wakeUp(cv2, publish_image)
+            print "Emotion wakeup is applied"
+
         elif msgs[0] == "exit" :
             print "Program is closing..."
             face.sleep(cv2, publish_image)
@@ -188,15 +163,6 @@ def callback_Command(data):
             isSystemRun = False
             sys.exit()
 
-  #      elif msgs[0] == "enable" :
-  #          wobbler.enable()
-  #          wobbler.wobble(0.0)
-  #          print "Wobbler is enabled and wobbled to 0.0"
-
-   #     elif msgs[0] == "disable" :
-   #         wobbler.disable()
-   #         print "Wobbler is disabled"
-
         elif msgs[0] == "sleep" :
             face.sleep(cv2, publish_image)
             print "Sst! Baxter is sleeping right now"
@@ -204,17 +170,12 @@ def callback_Command(data):
         defaultMsg = msg
     
     elif len(msgs) == 2 and msg != defaultMsg :
-        
+
         if msgs[0] == "skin" :
             numberOfSkin = int(msgs[1]) 
             face.skin.setSkin(numberOfSkin)
             face.show(publish_image)
         
- #       elif msgs[0] == "wobble" :
- #           angle = float(msgs[1])
- #           wobbler.wobble(angle)
- #           print "Wobbling is applicated"
-
         elif msgs[0] == "wake" and msgs[1] == "up" :
             face.wakeUp(cv2, publish_image)
             print "Baxter woke up"
@@ -227,32 +188,7 @@ def callback_Command(data):
             y = int(msgs[2])
             face.lookWithMotion(cv2, x, y, 0.8, publish_image)
 
-        elif msgs[0] == "human" and msgs[1] == "follow" and msgs[2] == "on" :
-            face.lookWithMotion(cv2, 0, 0, 0.5, publish_image)
-            humanFollowControl = True
-            armFollowControl = False
-            dynamicControl = False
- #           wobbler.enable()
- #           wobbler.wobble(0.0)
-            print "Human following mod on"
-
-        elif msgs[0] == "human" and msgs[1] == "follow" and msgs[2] == "off" :
-            humanFollowControl = False
-            dynamicControl = False
-            print "Human following mod off"
-            face.lookWithMotion(cv2, 0, 0, 0.5, publish_image)
-            dynamicControl = False
-  #          wobbler.enable()
-  #          wobbler.wobble(0.0)
-
-        elif msgs[0] == "arm" and msgs[1] == "follow" and msgs[2] == "off" :
-            armFollowControl = False
-            print "Arm following mod off"
-            face.lookWithMotion(cv2, 0, 0, 0.5, publish_image)
-            dynamicControl = False
-            wobbler.enable()
-            wobbler.wobble(0.0)
-        defaultMsg = msg
+
     
     elif len(msgs) == 4 and msg != defaultMsg :
         if msgs[0] == "look" :
@@ -261,167 +197,7 @@ def callback_Command(data):
             second = float(msgs[3])
             face.lookWithMotion(cv2, x, y, second, publish_image)
 
-        elif msgs[0] == "dynamic" and msgs[1] == "look" :
-            x = int(msgs[2])
-            y = int(msgs[3])
-            face.lookWithMotionDynamic(cv2, x, y, 0.5, publish_image, wobbler)
-
-        elif msgs[0] == "dynamic" and msgs[1] == "human" and msgs[2] == "follow" and msgs[3] == "on" :
-            humanFollowControl = True
-            armFollowControl = False
-            dynamicControl = True
-            print "Human following mod on"
-
-        elif msgs[0] == "left" and msgs[1] == "arm" and msgs[2] == "follow" and msgs[3] == "on" :
-            humanFollowControl = False
-            armFollowControl = True
-            isItLeftArm = True
-            dynamicControl = False
-            wobbler.enable()
-            wobbler.wobble(0.0)
-            print "Left arm following mod on"
-
-        elif msgs[0] == "right" and msgs[1] == "arm" and msgs[2] == "follow" and msgs[3] == "on" :
-            humanFollowControl = False
-            armFollowControl = True
-            isItLeftArm = False
-            dynamicControl = False
-            wobbler.enable()
-            wobbler.wobble(0.0)
-            print "Right arm following mod on"
-        defaultMsg = msg
-    
-    elif len(msgs) == 5 and msg != defaultMsg :
-        if msgs[0] == "dynamic" and msgs[1] == "right" and msgs[2] == "arm" and msgs[3] == "follow" and msgs[4] == "on" :
-            humanFollowControl = False
-            armFollowControl = True
-            isItLeftArm = False
-            dynamicControl = True
-            print "Dynamic right arm following mod on"
-
-        if msgs[0] == "dynamic" and msgs[1] == "left" and msgs[2] == "arm" and msgs[3] == "follow" and msgs[4] == "on" :
-            humanFollowControl = False
-            armFollowControl = True
-            isItLeftArm = True
-            dynamicControl = True
-            print "Dynamic left arm following mod on"
-        defaultMsg = msg
-
 
-# this function for the human following
-def callback_human_follow(msg):
-
-    global xOld 
-    global pKOld 
-    global coor
-    global elementOfHandleList
-    global oldCoor
-    sonarIDs = msg.channels[0].values 
-    sonarDistances = msg.channels[1].values
-    #r is a standart deviation of the sonar sensors' values.
-    r = 0.50635561 
-
-    #arrayOfSonarID is sensor shoul be proccessed
-    arrayOfSonarID = humanFollowNoiseElimination(sonarIDs, sonarDistances) 
-    numberOfData = len(arrayOfSonarID)
-    
-    if numberOfData > 0:
-        meanOfSonarID = mean(arrayOfSonarID)
-        # Kalman Filter Part
-        K = pKOld / (pKOld + r) 
-        x = xOld + K*(meanOfSonarID-xOld)
-        pK = (1-K) * pKOld
-        prob = 0.03 # Prob value determines that how much measured value effect the kalman filter value.
-        x = (x * (1-prob)) + meanOfSonarID*prob # Result of the kalman filter
-
-        # Meaning of the last 35 value
-        handleList[elementOfHandleList] = x
-        elementOfHandleList += 1
-        elementOfHandleList %= sizeOfHandleList
-
-        # Output of the value
-        xOld = x
-        pKOld = pK
-        
-        value = int(mean(handleList) * 26.67)
-        oldCoor = coor
-        coor = value #Coor is the coordinate of the object according to robot's eye
-        #print "Coor: {}, SumOfSensors: {}".format(coor, sum(arrayOfSonarID))
-
-def humanFollowNoiseElimination(sonarIDs, sonarDistances) :
-    arrayOfSonarID = []
-    numberOfData = len(sonarIDs)
-    counter = 0
-    minimumIndex = 0
-    maximumDistance = 2 # maximum distance as a unit of meter
-    percentageRate = 30 # to understand the object in front of it
-
-    # determine the minimum index
-    for index in range(numberOfData):
-        if (sonarIDs[index] <= 3 and sonarIDs[index] >= 0) or (sonarIDs[index] >= 9 and sonarIDs[index] <= 11):
-            if sonarDistances[index] < sonarDistances[minimumIndex]:
-                minimumIndex = index
-    
-    # Determining the values will be proccesed
-    for index in range(numberOfData):
-        if sonarIDs[index] <= 3 and sonarIDs[index] >= 0:
-            if sonarDistances[index] < maximumDistance and isInAvailablePercentage(sonarDistances[minimumIndex], sonarDistances[index], percentageRate):
-                levelOfSonar = float(sonarIDs[index])*(-1) # resizing the value between [-3, 3]
-                arrayOfSonarID.append(levelOfSonar)
-                counter += 1
-                continue
-        
-        elif sonarIDs[index] >= 9 and sonarIDs[index] <= 11:
-            if sonarDistances[index] < maximumDistance and isInAvailablePercentage(sonarDistances[minimumIndex], sonarDistances[index], percentageRate):
-                levelOfSonar = (12-float(sonarIDs[index])) # resizing the value between [-3, 3]
-                arrayOfSonarID.append(levelOfSonar)
-                counter += 1
-                continue
-
-    # Eliminate the outliers
-    if counter > 0 :
-        arrayOfSonarID = eliminateOutliers(arrayOfSonarID, 1.3)
-    
-    return arrayOfSonarID
-
-def callback_left_arm_follow(msg) :
-    global xAxisLeft
-    global yAxisLeft
-    # taken the coordinates
-    x = msg.pose.position.x
-    y = msg.pose.position.y
-    z = msg.pose.position.z
-
-    # angle calculation of y axis
-    yAxisAngle = math.atan(abs(x)/(z-c)) 
-    if yAxisAngle < 0:
-        yAxisAngle = (-3.14/2)-yAxisAngle
-    else:
-        yAxisAngle = (3.14/2)-yAxisAngle
-    if isItLeftArm == True :
-        yAxisLeft = (-76.394) * ( yAxisAngle )
-        xAxisLeft = (57.294) * ( math.atan(y/abs(x)) )
-
-def callback_right_arm_follow(msg):
-    global xAxisRight
-    global yAxisRight
-    # taken the coordinates
-    x = msg.pose.position.x
-    y = msg.pose.position.y
-    z = msg.pose.position.z
-
-    # angle calculation of y axis
-    yAxisAngle = math.atan(abs(x)/(z-c)) 
-    if yAxisAngle < 0:
-        yAxisAngle = (-3.14/2)-yAxisAngle
-    else:
-        yAxisAngle = (3.14/2)-yAxisAngle
-
-    if isItLeftArm == False :
-        yAxisRight = (-76.394) * ( yAxisAngle )
-        xAxisRight = (57.294) * ( math.atan(y/abs(x)) )
-
-    """ Main Functions """
 
 def main():
     global wobbler
@@ -433,7 +209,7 @@ def main():
 #    rospy.Subscriber('/robot/sonar/head_sonar/state', PointCloud, callback_human_follow)
 #    rospy.Subscriber('/robot/limb/left/endpoint_state', EndpointState, callback_left_arm_follow)
 #    rospy.Subscriber('/robot/limb/right/endpoint_state', EndpointState, callback_right_arm_follow)
-    rospy.Subscriber('display_chatter', String, callback_Command)
+    rospy.Subscriber('facial_expression', String, callback_Command)
     rospy.spin()
     return 0
 
@@ -455,28 +231,6 @@ def main_loop() :
             referenceTime = timeit.default_timer()
             print "wink motion is applicated"
 
-        if humanFollowControl == True :
-            if oldCoor != face.eye.getPositionX():
-                if dynamicControl == False :
-                    face.eye.lookExactCoordinate(coor, 0)
-                    face.show(publish_image)
-                else : 
-                    face.lookExactCoordinateDynamic(cv2, coor, 0, publish_image, wobbler)
-                    face.show(publish_image)
-
-        elif armFollowControl == True :
-            if dynamicControl == False :
-                if isItLeftArm:
-                    face.eye.lookExactCoordinate(int(xAxisLeft), int(yAxisLeft))
-                else:
-                    face.eye.lookExactCoordinate(int(xAxisRight), int(yAxisRight))
-            else:
-                if isItLeftArm:
-                    face.lookExactCoordinateDynamic(int(xAxisLeft), int(yAxisLeft), publish_image, wobbler)
-                else:
-                    face.lookExactCoordinateDynamic(int(xAxisRight), int(yAxisRight), publish_image, wobbler)
-            face.show(publish_image)
-
         if isSystemRun == False :
             sys.exit()