diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0fd5a77e21d3643b647c89d202fd556cedaf0097
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,204 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(robot_facial_expression)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS  # catkin components this package builds against
+  actionlib
+  baxter_core_msgs
+  control_msgs
+  dynamic_reconfigure
+  rospy
+  sensor_msgs
+  std_msgs
+  trajectory_msgs
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+##   your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+##   * add a build_depend tag for "message_generation"
+##   * add a build_depend and a run_depend tag for each package in MSG_DEP_SET
+##   * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+##     but can be declared for certainty nonetheless:
+##     * add a run_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+##   * add "message_generation" and every package in MSG_DEP_SET to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * add "message_runtime" and every package in MSG_DEP_SET to
+##     catkin_package(CATKIN_DEPENDS ...)
+##   * uncomment the add_*_files sections below as needed
+##     and list every .msg/.srv/.action file to be processed
+##   * uncomment the generate_messages entry below
+##   * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+#   FILES
+#   Message1.msg
+#   Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+#   FILES
+#   Service1.srv
+#   Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+#   FILES
+#   Action1.action
+#   Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+#   DEPENDENCIES
+#   baxter_core_msgs control_msgs sensor_msgs std_msgs trajectory_msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+##   * add a build_depend and a run_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+##   * add "dynamic_reconfigure" to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * uncomment the "generate_dynamic_reconfigure_options" section below
+##     and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+#   cfg/DynReconf1.cfg
+#   cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(  # NOTE(review): CATKIN_DEPENDS left commented out -- dependent packages will not inherit these deps; confirm intended
+#  INCLUDE_DIRS include
+#  LIBRARIES baxter_face
+#  CATKIN_DEPENDS actionlib baxter_core_msgs control_msgs dynamic_reconfigure rospy sensor_msgs std_msgs trajectory_msgs
+#  DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+  ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+#   src/${PROJECT_NAME}/baxter_face.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/baxter_face_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+#   ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# install(PROGRAMS
+#   scripts/my_python_script
+#   DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables and/or libraries for installation
+# install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
+#   ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+#   DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+#   FILES_MATCHING PATTERN "*.h"
+#   PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+#   # myfile1
+#   # myfile2
+#   DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_baxter_face.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+#   target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/launch/robot_facial_expression.launch b/launch/robot_facial_expression.launch
new file mode 100644
index 0000000000000000000000000000000000000000..e187ae7a4c935cd05470769f0c82a1de8025569b
--- /dev/null
+++ b/launch/robot_facial_expression.launch
@@ -0,0 +1,14 @@
+<launch>
+
+  
+
+  <!-- Start screen_listener node -->
+  <node name="rsdk_xdisplay_image" pkg="robot_facial_expression" type="screen_listener.py" />
+
+  <param name = "robot_gender" value ="male"/> <!-- read by the face scripts via rospy.get_param("/robot_gender"); selects the image set under data/ -->
+
+  <!-- Start control_physical node 
+  <node name="control" pkg="robot_facial_expression" type="control_physical.py" />
+-->
+
+</launch>
diff --git a/package.xml b/package.xml
new file mode 100644
index 0000000000000000000000000000000000000000..dc5967943aaee3d7d6a9aab4bbe1fcf4b310c89f
--- /dev/null
+++ b/package.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0"?>
+<package>
+  <name>robot_facial_expression</name>
+  <version>0.0.0</version>
+  <description>Edited version of the baxter_face package</description>
+
+  <!-- One maintainer tag required, multiple allowed, one person per tag -->
+  <!-- Example:  -->
+  <!-- <maintainer email="jane.doe@example.com">Jane Doe</maintainer> -->
+  <maintainer email="aandriella@todo.todo">digammart</maintainer>
+
+
+  <!-- One license tag required, multiple allowed, one license per tag -->
+  <!-- Commonly used license strings: -->
+  <!--   BSD, MIT, Boost Software License, GPLv2, GPLv3, LGPLv2.1, LGPLv3 -->
+  <license>TODO</license>
+
+
+  <!-- Url tags are optional, but multiple are allowed, one per tag -->
+  <!-- Optional attribute type can be: website, bugtracker, or repository -->
+  <!-- Example: -->
+  <!-- <url type="website">http://wiki.ros.org/baxter_face</url> -->
+
+
+  <!-- Author tags are optional, multiple are allowed, one per tag -->
+  <!-- Authors do not have to be maintainers, but could be -->
+  <!-- Example: -->
+  <!-- <author email="jane.doe@example.com">Jane Doe</author> -->
+
+
+  <!-- The *_depend tags are used to specify dependencies -->
+  <!-- Dependencies can be catkin packages or system dependencies -->
+  <!-- Examples: -->
+  <!-- Use build_depend for packages you need at compile time: -->
+  <!--   <build_depend>message_generation</build_depend> -->
+  <!-- Use buildtool_depend for build tool packages: -->
+  <!--   <buildtool_depend>catkin</buildtool_depend> -->
+  <!-- Use run_depend for packages you need at runtime: -->
+  <!--   <run_depend>message_runtime</run_depend> -->
+  <!-- Use test_depend for packages you need only for testing: -->
+  <!--   <test_depend>gtest</test_depend> -->
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>actionlib</build_depend>
+  <build_depend>baxter_core_msgs</build_depend>
+  <build_depend>control_msgs</build_depend>
+  <build_depend>dynamic_reconfigure</build_depend>
+  <build_depend>rospy</build_depend>
+  <build_depend>sensor_msgs</build_depend>
+  <build_depend>std_msgs</build_depend>
+  <build_depend>trajectory_msgs</build_depend>
+  <run_depend>actionlib</run_depend>
+  <run_depend>baxter_core_msgs</run_depend>
+  <run_depend>control_msgs</run_depend>
+  <run_depend>dynamic_reconfigure</run_depend>
+  <run_depend>rospy</run_depend>
+  <run_depend>sensor_msgs</run_depend>
+  <run_depend>std_msgs</run_depend>
+  <run_depend>trajectory_msgs</run_depend>
+
+
+  <!-- The export tag contains other, unspecified, tags -->
+  <export>
+    <!-- Other tools can request additional information be placed here -->
+
+  </export>
+</package>
diff --git a/scripts/Eye.py b/scripts/Eye.py
new file mode 100755
index 0000000000000000000000000000000000000000..bcf1975b749095529d18bc7cba456bf0a473bb1c
--- /dev/null
+++ b/scripts/Eye.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+
+'''
+@Author: Bilgehan NAL
+This file has a class which defines the shape and movement of the baxter's eyes.
+'''
+
+from PIL import Image
+import math
+import rospy
+
+class Eye:
+    
+    def __init__(self):
+        self.robot_gender = rospy.get_param("/robot_gender")  # gender string, used as the image folder name under data/
+        self.eyes = Image.open("data/"+self.robot_gender+"/baxter_eye.png")   # eye image
+        self.positionX = 0  # keeps the x position as a coordinate
+        self.positionY = 0  # keeps the y position as a coordinate
+
+    '''Eye movement functions'''
+        
+    #  Sets the new eye position; positions outside the elliptical eye area are scaled back onto its boundary
+    def lookExactCoordinate(self, x, y):
+        #print "Look: ", x, "  -  ",  y
+        if self.dotPosition(x, y) <= 1:
+            self.setPositionX(x)
+            self.setPositionY(y)
+        else: 
+            rate = self.scaleRateOfRadius(x, y, self.calculateRadiusOfCircleFromGivenPoints(90, 120, self.calculateAngle(x,y)))  # NOTE(review): semi-axis 90 here vs radiusX 80 in dotPosition -- confirm intended
+            self.setPositionX(int(x*rate))
+            self.setPositionY(int(y*rate))
+    
+    #  Interpolates the eye position for the current animation frame; returns True while the animation is still running
+    def lookWithMotionCalculation(self, x, y, destinationX, destinationY, totalTime, instantTime):
+    
+        if totalTime > instantTime:
+            diffirenceOfX = destinationX - x
+            newPositionDiffirenceX = float(diffirenceOfX)/totalTime * instantTime
+            newPositionX = int(x + newPositionDiffirenceX)
+
+            diffirenceOfY = destinationY - y
+            newPositionDiffirenceY = float(diffirenceOfY)/totalTime * instantTime
+            newPositionY = int(y + newPositionDiffirenceY)
+            self.lookExactCoordinate(newPositionX, newPositionY)
+            return True
+        else: 
+            self.lookExactCoordinate(destinationX, destinationY)
+            return False
+
+
+    ''' Ellipse Calculations'''
+
+    def calculateRadiusOfCircleFromGivenPoints(self, a, b,theta):  # polar-form radius of an ellipse with semi-axes a, b at angle theta
+        firstPart = float(a) * float(b)
+        secondPart = (a**2) * (math.sin(theta)**2) + (b**2) * (math.cos(theta)**2)
+        secondPart = math.sqrt(secondPart)
+        return firstPart / secondPart
+
+    def calculateAngle(self, x, y):  # angle of the point (x, y) from the x-axis, in radians
+        if x != 0:
+            return math.atan(float(y)/float(x))
+        else: 
+            return math.atan(float(y)/float(x+0.00001))  # x == 0: nudge the denominator to avoid ZeroDivisionError
+
+    '''This function returns the result of ellipse formula according to x and y positions.
+    If the result between [0, 1]. Eyes are in the eyes space'''
+    
+    def dotPosition(self, x, y):
+        radiusX = 80
+        radiusY = 120
+        return (float(x**2) / radiusX**2) + (float(y**2) / radiusY**2)  
+        # Formula of an ellipse
+
+    # Calculates the factor by which (x, y) must be scaled to land on the ellipse boundary
+    def scaleRateOfRadius(self, x, y, idealRadius): 
+        radius = math.sqrt(x**2 + y**2)
+        rate = idealRadius / radius
+        return rate
+    
+
+
+    # Encapsulation
+    
+    def getEyes(self):
+        return self.eyes
+    
+    def setPositionX(self, positionX):
+        self.positionX = positionX
+
+    def getPositionX(self):
+        return self.positionX
+
+    def setPositionY(self, positionY):
+        self.positionY = positionY
+
+    def getPositionY(self):
+        return self.positionY
+
+   
+
+
diff --git a/scripts/Eye.pyc b/scripts/Eye.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ff6a2a6488b43c93e1fdc6c978d5acff8a6751ed
Binary files /dev/null and b/scripts/Eye.pyc differ
diff --git a/scripts/Eyebrow.py b/scripts/Eyebrow.py
new file mode 100755
index 0000000000000000000000000000000000000000..cd7b2b69e7d7657567ad8fb677b230557c72461d
--- /dev/null
+++ b/scripts/Eyebrow.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+"""
+@Author: Bilgehan NAL
+This file has a class which defines the eyebrow of the baxter's face.
+"""
+
+from PIL import Image
+import rospy
+
+class Eyebrow:
+
+    indexOfEyebrow = 0  # chosen element of the array (class-level default)
+
+    def __init__(self, initEyebrow):
+        self.robot_gender = rospy.get_param("/robot_gender")  # gender string, used as the image folder name under data/
+        # This array keeps the different eyebrow shapes
+        self.eyebrows = [
+            Image.open("data/"+self.robot_gender+"/eyebrow/baxter_eyebrow_0.png"),
+            Image.open("data/"+self.robot_gender+"/eyebrow/baxter_eyebrow_1.png"),
+            Image.open("data/"+self.robot_gender+"/eyebrow/baxter_eyebrow_2.png"),
+            Image.open("data/"+self.robot_gender+"/eyebrow/baxter_eyebrow_3.png"),
+            Image.open("data/"+self.robot_gender+"/eyebrow/baxter_eyebrow_4.png")
+        ]
+
+        self.indexOfEyebrow = initEyebrow
+    
+
+    # Encapsulation
+
+    def setEyebrow(self, eyebrow):
+        self.indexOfEyebrow = eyebrow
+
+    def getEyebrow(self):
+        return self.eyebrows[self.indexOfEyebrow]
\ No newline at end of file
diff --git a/scripts/Eyebrow.pyc b/scripts/Eyebrow.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7f26fede1df2a0449bf57e952549f41022d24c6d
Binary files /dev/null and b/scripts/Eyebrow.pyc differ
diff --git a/scripts/Eyelid.py b/scripts/Eyelid.py
new file mode 100755
index 0000000000000000000000000000000000000000..e96eb830623b21d2c516f4d0a5566c27b607c82e
--- /dev/null
+++ b/scripts/Eyelid.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+'''
+@Author: Bilgehan NAL
+This file has class which defines the eyelid of the baxter
+'''
+
+from PIL import Image
+import rospy
+
+class Eyelid:
+
+    def __init__(self):
+        self.robot_gender = rospy.get_param("/robot_gender")  # gender string, used as the image folder name under data/
+        self.eyelid = Image.open("data/"+self.robot_gender+"/baxter_eyelid.png") # eyelid image
+        self.position = 0 # y position, we don't need x position because of vertical movement.
+
+    def moveCalculation(self, position, destinationPosition, totalTime, instantTime):  # interpolates the eyelid y-position for one animation frame; True while still animating
+    
+        if totalTime > instantTime:
+            diffirenceOfPosition = destinationPosition - position
+            newPositionDiffirencePosition = float(diffirenceOfPosition)/totalTime * instantTime
+            newPositionPosition = int(position + newPositionDiffirencePosition)
+            self.setPosition(newPositionPosition)
+            return True
+        else: 
+            self.setPosition(destinationPosition)
+            return False
+
+    # Encapsulation
+    
+    def setPosition(self, position):
+        self.position = position
+
+    def getPosition(self):
+        return self.position
+
+    def getEyelid(self):
+        return self.eyelid
\ No newline at end of file
diff --git a/scripts/Eyelid.pyc b/scripts/Eyelid.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5114c50444f883c8456520e7c7e28a5cdf74d3c6
Binary files /dev/null and b/scripts/Eyelid.pyc differ
diff --git a/scripts/Face.py b/scripts/Face.py
new file mode 100755
index 0000000000000000000000000000000000000000..455e7bc649b90e0c5072e4d16906dcb9ac70e344
--- /dev/null
+++ b/scripts/Face.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+
+'''
+@Author: Bilgehan NAL
+This file has a class which defines the face of baxter
+Face class has other part of the face objects.
+'''
+
+'''
+Baxter Face Descriptions:
+
+Skin, Mouth and Eyebrow has multiple shapes.
+Skin has 5 -> [0, 5]
+Mouth has 7 -> [0, 6]
+Eyebrow has 5 -> [0, 4]
+
+Skin :: 1->5 : yellow skin -> red skin
+        -> 6 is sleeping skin
+Mouth ::
+    0 -> angry mouth
+    1 -> boring mouth
+    2 -> confused mouth
+    3 -> sad mouth
+    4 -> smiling tooth mouth
+    5 -> smiling mouth
+    6 -> smiling open mouth
+Eyebrow ::
+    0 -> normal eyebrow
+    1 -> ( ): eyebrow
+    2 -> One eyebrow in the high
+    3 -> angry eyebrow
+    4 -> Kucuk Emrah Eyebrow
+
+Coordinate range for x: [-80, 80]
+Coordinate range for y: [-120, 120]
+
+'''
+
+from PIL import Image
+import Skin
+import Mouth
+import Eyebrow
+import Eye
+import Eyelid
+from numpy import array
+import timeit
+import cv2
+import os
+import random
+import getpass
+import time
+import rospy
+
+class Face:
+
+    def __init__(self):
+        self.robot_gender = rospy.get_param("/robot_gender")  # gender string, used as the image folder name under data/
+
+        # determine the path and set the default path place
+        os.chdir(r'/home/{}/pal/cognitive_game_ws/src/robot_facial_expression/scripts'.format(getpass.getuser()))  # NOTE(review): hardcoded workspace layout -- breaks on other installs
+        ''' Parts of the face of baxter are defined.'''
+        self.backgroundImage = Image.open("data/"+self.robot_gender+"/baxter_background.png") # Background behind the eyes
+        # Face part objects
+        self.skin = Skin.Skin(5) # range: [0, 5]
+        self.mouth = Mouth.Mouth(2) # range: [0, 5] -- NOTE(review): header says [0, 6] but Mouth defines only 6 images
+        self.eyebrow = Eyebrow.Eyebrow(1) # range: [0, 4]
+        self.eye = Eye.Eye()
+        self.eyelid = Eyelid.Eyelid()
+        self.eyelid.setPosition(-330)
+        self.eyesCoordinateX = self.eye.getPositionX()
+        self.angleOfView = 0.25
+    # buildFace combines all the face parts into a single image.
+
+    def buildFace(self):
+        # Merging the layers
+        faceImage = self.backgroundImage.copy()
+        faceImage.paste(self.eye.getEyes(), (int(self.eye.getPositionX()), int(self.eye.getPositionY())), self.eye.getEyes())
+        faceImage.paste(self.eyelid.getEyelid(), (0, self.eyelid.getPosition()), self.eyelid.getEyelid())
+        faceImage.paste(self.skin.getSkin(), (0, 0), self.skin.getSkin())
+        faceImage.paste(self.mouth.getMouth(), (0, 0), self.mouth.getMouth())
+        faceImage.paste(self.eyebrow.getEyebrow(), (0, 0), self.eyebrow.getEyebrow())
+        image = array(faceImage)
+        return image
+
+    def show(self, publish):  # builds the face and hands it to the publish callback
+        image = self.buildFace()
+        publish(image)
+
+    # Reposition of the eyes of the baxter
+    # This function provides the eyes' movement animation
+    def lookWithMotion(self, cv2, destinationX, destinationY, time, publish):
+        """
+        Look with motion is a looking style with an animation
+        Animation is generated like this:
+            Eyes go to the given coordinates in a given time, in a loop
+        """
+        startTime = timeit.default_timer()
+        currentTime = timeit.default_timer()
+        x = self.eye.getPositionX()
+        y = self.eye.getPositionY()
+
+        while(self.eye.lookWithMotionCalculation(x, y, destinationX, destinationY, time, currentTime-startTime)):
+            image = self.buildFace()
+            publish(image) # this part is for the baxter's face
+            currentTime = timeit.default_timer()
+
+    """ 
+    Dynamic looking functions recalculate the x value according to the position of the Baxter's head
+    if the goal coordinate is not in the angle of view of Baxter. -> Wobbling the head joint
+    """
+
+    def lookWithMotionDynamic(self, cv2, destinationX, destinationY, time, publish, wobbler):
+        # if the wobbler is not initialized, skip the function
+        if wobbler != None: 
+            # taking head position as a coordinate
+            headPositionRadian = wobbler.getPosition()
+            headPositionCoordinate = self.radianToCoordinate(headPositionRadian)
+            # control for goal coordinate is not in the angle of view of Baxter
+            if abs(destinationX - headPositionCoordinate) > self.radianToCoordinate(self.angleOfView):
+                # wobbling -> look at the given coordinates physically
+                print "Wobbling to: ", destinationX
+                wobbler.wobble(self.coordinateToRadian(destinationX))
+                self.eye.lookExactCoordinate(0, destinationY)
+                image = self.buildFace()
+                publish(image)
+            else:
+                # Normal looking with eyes with an animation
+                destinationX = destinationX - headPositionCoordinate
+                self.lookWithMotion(cv2, destinationX, destinationY, time, publish)
+
+    def lookExactCoordinateDynamic(self, destinationX, destinationY, publish, wobbler):
+        # Looking the given coordinate according to the position of the head.
+        if wobbler != None: 
+            # taking head position as a coordinate
+            headPositionRadian = wobbler.getPosition()
+            headPositionCoordinate = self.radianToCoordinate(headPositionRadian)
+            # control for goal coordinate is not in the angle of view of Baxter
+            if abs(destinationX - headPositionCoordinate) > self.radianToCoordinate(self.angleOfView):
+                # wobbling -> look at the given coordinates physically
+                print "Wobbling to: ", destinationX
+                wobbler.wobble(self.coordinateToRadian(destinationX))
+                self.eye.lookExactCoordinate(0, destinationY)  
+            else:
+                # Normal looking with eyes with an animation
+                destinationX = destinationX - headPositionCoordinate
+                self.eye.lookExactCoordinate(destinationX, destinationY)
+            image = self.buildFace()
+            publish(image)
+
+    
+    """
+    Winkmove functions set the position of the eyelid with an animation.
+    """
+
+    def winkMove(self, cv2, destinationPosition, time, publish):
+        # Animation initial values
+        startTime = timeit.default_timer()
+        currentTime = timeit.default_timer()
+        position = self.eyelid.getPosition()
+        # Animation part
+        while(self.eyelid.moveCalculation(position, destinationPosition, time, currentTime-startTime)):
+            image = self.buildFace()
+            publish(image)
+            currentTime = timeit.default_timer()
+            
+    def wink(self, cv2, publish):  # close the eyelids briefly, then restore their previous position
+        firstPosition = self.eyelid.getPosition()
+        self.winkMove(cv2, 0, 0.3, publish)
+        self.winkMove(cv2, firstPosition, 0.2, publish)
+        self.eyelid.setPosition(firstPosition)
+        self.show(publish)
+
+    # Encapsulation
+
+    def getSkin(self):
+        return self.skin
+
+    def getMouth(self):
+        return self.mouth
+
+    def getEyebrow(self):
+        return self.eyebrow
+
+    def getEye(self):
+        return self.eye
+
+    def getEyelid(self):
+        return self.eyelid
+
+    def getBackgroundImage(self):
+        return self.backgroundImage
+
+
+    # Emotions
+
+    def showEmotion(self, mouthIndex, eyebrowIndex, cv2, publish):  # sets mouth/eyebrow shapes and republishes the face
+        self.mouth.setMouth(mouthIndex)
+        self.eyebrow.setEyebrow(eyebrowIndex)
+        self.show(publish)
+
+    def sleep(self, cv2, publish):
+       self.winkMove(cv2, 0, 0.6, publish) # Eyelids lowered (eyes closed).
+       self.skin.setSkin(5) # range: [0, 5]
+       self.showEmotion(1, 1, cv2, publish)
+
+    def wakeUp(self, cv2, publish):
+        self.winkMove(cv2, -330, 0.8, publish) # Eyelids raised out of view.
+        self.skin.setSkin(3) # range: [0, 5]
+        self.showEmotion(5, 0, cv2, publish)
+
+    def emotion_default(self, cv2, publish):
+       self.winkMove(cv2, -330, 0.3, publish) # Eyelids raised out of view.
+       self.skin.setSkin(2)
+       self.showEmotion(5, 0, cv2, publish)
+    
+    def emotion_happy(self, cv2, publish):
+        mouthArray = [4, 6]
+        eyeBrowArray = [0, 1]
+        self.winkMove(cv2, -330, 0.3, publish) # Eyelids raised out of view.
+        self.skin.setSkin(2)
+        mouthIndex = 4  # random.choice(mouthArray) disabled: always index 4
+        eyebrowIndex = random.choice(eyeBrowArray)
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def emotion_neutral(self, cv2, publish):
+        eyeBrowArray = [0, 1]
+        self.winkMove(cv2, -330, 0.3, publish)  # Eyelids raised out of view.
+        self.skin.setSkin(2)
+        mouthIndex = 6  # NOTE(review): Mouth defines indices 0-5 only -- 6 raises IndexError in getMouth
+        eyebrowIndex = random.choice(eyeBrowArray)
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def emotion_angry(self, cv2, publish):
+        mouthArray = [0, 3]
+        eyeBrowArray = [2, 3]
+        self.winkMove(cv2, -330, 0.3, publish) # Eyelids raised out of view.
+        self.skin.setSkin(4)
+        mouthIndex = random.choice(mouthArray)
+        eyebrowIndex = random.choice(eyeBrowArray)
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def emotion_confused(self, cv2, publish):
+        mouthArray = [2]
+        eyeBrowArray = [0, 1]
+        self.winkMove(cv2, -330, 0.3, publish) # Eyelids raised out of view.
+        self.skin.setSkin(3)
+        mouthIndex = random.choice(mouthArray)
+        eyebrowIndex = random.choice(eyeBrowArray)
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def emotion_sad(self, cv2, publish):
+        mouthArray = [1, 3]
+        eyeBrowArray = [4]
+        self.winkMove(cv2, -330, 0.3, publish) # Eyelids raised out of view.
+        self.skin.setSkin(1)
+        mouthIndex = random.choice(mouthArray)
+        eyebrowIndex = random.choice(eyeBrowArray)
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def emotion_panic(self, cv2, publish):
+        mouthArray = [2]
+        eyeBrowArray = [1]
+        self.winkMove(cv2, -330, 0.3, publish) # Eyelids raised out of view.
+        self.skin.setSkin(0)
+        mouthIndex = random.choice(mouthArray)
+        eyebrowIndex = random.choice(eyeBrowArray)
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def emotion_bored(self, cv2, publish):
+        mouthArray = [1]
+        eyeBrowArray = [0, 2, 3]
+        self.winkMove(cv2, -150, 0.3, publish) # Eyelids are in the middle of the eyes.
+        self.skin.setSkin(2)
+        mouthIndex = random.choice(mouthArray)
+        eyebrowIndex = 2  # random.choice(eyeBrowArray) disabled: always index 2
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def emotion_crafty(self, cv2, publish):
+        mouthArray = [4, 6]
+        eyeBrowArray = [2, 3]
+        self.winkMove(cv2, -330, 0.3, publish) # Eyelids raised out of view.
+        self.skin.setSkin(3)
+        mouthIndex = 4  # random.choice(mouthArray) disabled: always index 4
+        eyebrowIndex = random.choice(eyeBrowArray)
+        self.showEmotion(mouthIndex, eyebrowIndex, cv2, publish)
+
+    def testAllImages(self, cv2, publish):
+        for index in range(6):
+            self.skin.setSkin(index)
+            self.show(publish)
+        for index in range(7):  # NOTE(review): Mouth has only 6 images -- index 6 will raise IndexError
+            self.showEmotion(index, 0, cv2, publish)
+            time.sleep(0.1)
+        for index in range(5):
+            self.showEmotion(1, index, cv2, publish)
+            time.sleep(0.1)
+
+
+    """ Head Joint move calculations """
+
+    def coordinateToRadian(self, theta) :  # NOTE(review): parameter is a screen coordinate despite the name
+        return (3 * theta) / 160.0
+
+    def radianToCoordinate(self, coordinate) :  # NOTE(review): parameter is an angle in radians despite the name
+        return (160 * coordinate) / 3
+
diff --git a/scripts/Face.pyc b/scripts/Face.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fc9db794fbbe5b61f8c9eddbb6f3310497da69d6
Binary files /dev/null and b/scripts/Face.pyc differ
diff --git a/scripts/Mouth.py b/scripts/Mouth.py
new file mode 100755
index 0000000000000000000000000000000000000000..f28bb5edd0bf022b62c50ddd039591565175da20
--- /dev/null
+++ b/scripts/Mouth.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+"""
+@Author: Bilgehan NAL
+This file has a class which defines the mouth of the baxter's face.
+"""
+
+from PIL import Image
+import rospy
+
+class Mouth:
+
+    indexOfMouth = 0    # chosen element of the array (class-level default)
+
+    def __init__(self, initMouth):
+        self.robot_gender = rospy.get_param("/robot_gender")  # gender string, used as the image folder name under data/
+
+        # This array keeps the different mouth shapes
+        self.mouths = [
+            Image.open("data/"+self.robot_gender+"/mouth/baxter_mouth_angry.png"),
+            Image.open("data/"+self.robot_gender+"/mouth/baxter_mouth_boring_1.png"),
+            Image.open("data/"+self.robot_gender+"/mouth/baxter_mouth_confused.png"),
+            Image.open("data/"+self.robot_gender+"/mouth/baxter_mouth_sad_1.png"),
+            Image.open("data/"+self.robot_gender+"/mouth/baxter_mouth_smile_open_1.png"),
+            Image.open("data/"+self.robot_gender+"/mouth/baxter_mouth_neutral_1.png")  # NOTE(review): only 6 entries, but Face uses index 6 in emotion_neutral/testAllImages -- IndexError
+            #Image.open("data/mouth/baxter_mouth_smile_open.png")
+        ]
+
+        self.indexOfMouth = initMouth
+
+
+    # Encapsulation
+
+    def setMouth(self, mouth):
+        self.indexOfMouth = mouth
+
+    def getMouth(self):
+        print(self.indexOfMouth)  # NOTE(review): debug print left in -- consider removing
+        return self.mouths[self.indexOfMouth]
\ No newline at end of file
diff --git a/scripts/Mouth.pyc b/scripts/Mouth.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8ef1899040974bc4381a1937119d70aa08651599
Binary files /dev/null and b/scripts/Mouth.pyc differ
diff --git a/scripts/Skin.py b/scripts/Skin.py
new file mode 100755
index 0000000000000000000000000000000000000000..74375c14aecac51f1976ee3234bd16692f8a9e4d
--- /dev/null
+++ b/scripts/Skin.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+"""
+@Author: Bilgehan NAL
+This file has a class which defines the skin of the baxter's face.
+"""
+
+from PIL import Image
+import rospy
+
+class Skin:
+    
+    indexOfSkin = 5 # choosen element of the array
+
+    def __init__(self, initSkin):
+        self.robot_gender = rospy.get_param("/robot_gender")
+
+        # This array keeps the diffirent colour version of skin
+        self.skins = [
+            Image.open("data/"+self.robot_gender+"/skin/baxter_skin_0_1.png"),
+            Image.open("data/"+self.robot_gender+"/skin/baxter_skin_0_1.png"),
+            Image.open("data/"+self.robot_gender+"/skin/baxter_skin_0_1.png"),
+            Image.open("data/"+self.robot_gender+"/skin/baxter_skin_3.png"),
+            Image.open("data/"+self.robot_gender+"/skin/baxter_skin_4.png"),
+            Image.open("data/"+self.robot_gender+"/skin/baxter_skin_5.png")
+        ]
+
+
+
+        self.indexOfSkin = initSkin
+    
+
+    # Encapsulation 
+
+    def setSkin(self, skinNo):
+        self.indexOfSkin = skinNo
+
+    def getSkin(self):
+        return self.skins[self.indexOfSkin]
\ No newline at end of file
diff --git a/scripts/Skin.pyc b/scripts/Skin.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a52dd11f2a200eb017f11932bacaf5c5e5a4522f
Binary files /dev/null and b/scripts/Skin.pyc differ
diff --git a/scripts/Voice_Recogniser.py b/scripts/Voice_Recogniser.py
new file mode 100755
index 0000000000000000000000000000000000000000..a68e95a38aab96a4b90471d3d7c1d8208f74e737
--- /dev/null
+++ b/scripts/Voice_Recogniser.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+
+'''
+@Author: Bilgehan NAL
+Voice Recogniser is a class to help listening the speech and converting to string
+'''
+
+import speech_recognition as sr
+import io
+import sys
+
+class Voice_Recogniser:
+
+    # These variables are sample language code
+    TURKEY = 'tr-TR'
+    US = 'en-US'
+    UK = 'en-GB'
+    FRANCE = 'fr-FR'
+    SPAIN = 'es-ES'
+    GERMAN = 'de-DE'
+    ITALY = 'it-IT'
+    RUSSIA = 'ru-RU'
+
+    def __init__(self):
+        # Dictionary, all comands are stored in a dictionary
+        self.commands = {'---': '---'}
+
+    def __init__(self, txt_path):
+        self.commands = {'---': '---'}
+        self.updateDictionary(txt_path)
+
+    ''' 
+    Dictionary is used for to decide the action
+    if the string(speech) includes any key value, it returns command
+    txt file should be like below
+
+    hello.be_happy
+    wellcome.be_happy
+
+    if we process the string given from listen function.
+    The string includes hello or wellcome then, it returns be_happy
+
+    '''
+    def updateDictionary(self, path):
+        #Read data line by line to a list from the txt file.
+        with io.open(path, 'r', encoding='utf-8') as file:
+            my_list = file.readlines()
+        #Seperating waited speech and commands. These two things are seperated by a character of dot(.)
+        for row in my_list:
+            command = row.encode('utf-8').split(".")
+            if len(command) == 2:
+                self.commands[command[0]] = command[1]
+                print ("Key: {}, Value: {}".format(command[0], command[1]))
+
+    # listen_language is a voice recognition function, language is given through a parameter.
+    def listen_language(self, language):
+        string = "-"
+        r = sr.Recognizer()
+        while string == "-":
+            with sr.Microphone() as source:   
+                print("Baxter is listening you...")              
+                audio = r.listen(source)    
+                print("wait...")  
+            try:
+                string = r.recognize_google(audio, language=language) #Recognize speech
+                print("Baxter thinks you said ->  " + string)
+            except sr.UnknownValueError:
+                string = "-"
+            except sr.RequestError as e:
+                print("Could not request results from Google Speech Recognition service; {0}".format(e))
+        print("Done...")
+        return string
+
+    # Default listen function, it recognises US English
+    def listen(self):
+        string = "-"
+        r = sr.Recognizer()
+        while string == "-":
+            with sr.Microphone() as source:   
+                print("Baxter is listening you...")              
+                audio = r.listen(source)    
+                print("wait...")  
+            try:
+                string = r.recognize_google(audio, language=US) #Recognize speech
+                print("Baxter thinks you said ->  " + string)
+            except sr.UnknownValueError:
+                string = "-"
+            except sr.RequestError as e:
+                print("Could not request results from Google Speech Recognition service; {0}".format(e))
+        print("Done...")
+        return string
+
+    def process(self, string):
+        enc = sys.getdefaultencoding()
+        result = "Speech could not be processed" #Default message
+        string = string.lower()#All cases are converted to lower case
+        # Search the commands in dictionary
+        for key in self.commands.keys():
+            # if the key is substring of string -> key is our commands.
+            if key in string:
+                result = self.commands[key]
+                break
+        return result.rstrip().lower().encode('utf_8')
\ No newline at end of file
diff --git a/scripts/control_physical.py b/scripts/control_physical.py
new file mode 100755
index 0000000000000000000000000000000000000000..c3153c8bb48c71bde9124f92a039179f7bd81980
--- /dev/null
+++ b/scripts/control_physical.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+
+'''
+@Author: Bilgehan NAL
+This file is a publisher node. 
+This node listens the data came from the buttons of baxter.
+According to this data user can control the baxter's face.
+
+'''
+
+import rospy
+import baxter_interface
+from std_msgs.msg import String
+import time
+
+# commands are the emotions as a list
+commands = ['default',
+'happy',
+'sad',
+'confused',
+'angry',
+'panic',
+'crafty',
+'bored']
+
+'''
+getEmotion function returns the key of the emotions according to
+the situation of the wheel.
+'''
+def getEmotion(value):
+    numberOfEmotions = len(commands)
+    rangeOfEmotions = 256.0/float(numberOfEmotions)
+    return int(float(value)/rangeOfEmotions)
+
+def main():
+    rospy.init_node("control", anonymous=True)
+    pub = rospy.Publisher('display_chatter', String, queue_size=40) #Display chatter publisher defined.
+
+    # This variables fot helping that which action will be chosed
+    indexOfEmotion = 0
+    isWakeUp = False
+    isFollow = False
+    emotion = 0
+    isEnable = False
+
+    '''
+    Navigators are declared.
+    -> Navigators on the arms are used for changing the emotions. 
+    Okay buttons of the arm navigators are used for enable or disable robot
+    -> Navigators on the torses are unsed for the arm following action.
+    Also they are used for two actions. (Sleep and wake up)
+    '''
+    navLeftArm = baxter_interface.Navigator('left')
+    navRightArm = baxter_interface.Navigator('right')
+    navLeftTorso = baxter_interface.Navigator('torso_left')
+    navRightTorso = baxter_interface.Navigator('torso_right')
+
+    print "Controller is enable..."
+
+    while not rospy.is_shutdown():
+        # Arm navigators okay button to enable and disable
+        if navLeftTorso._state.buttons[0] or navRightTorso._state.buttons[0]:
+            if not isEnable:
+                pub.publish("enable")
+                isEnable = True
+                print "enable"
+            else:
+                pub.publish("disable")
+                isEnable = False
+                print "disable"
+        #Left arm up button to wake up
+        elif navLeftArm._state.buttons[1]:
+            pub.publish("wake_up")
+            isWakeUp = True
+            print "wake_up"
+        #Left arm down button to sleep
+        elif navLeftArm._state.buttons[2]:
+            pub.publish("sleep")
+            isWakeUp = False
+            print "sleep"
+        #Right arm buttons to follow arms
+        elif navRightArm._state.buttons[1]:
+            if isFollow:
+                pub.publish("arm_follow_off")
+                isFollow = False
+                print "arm_follow_off"
+            else:
+                pub.publish("dynamic_left_arm_follow_on")
+                isFollow = True
+                print "dynamic_left_arm_follow_on"
+
+        elif navRightArm._state.buttons[2]:
+            if isFollow:
+                pub.publish("arm_follow_off")
+                isFollow = False
+                print "arm_follow_off"
+            else:
+                pub.publish("dynamic_right_arm_follow_on")
+                isFollow = True
+                print "dynamic_right_arm_follow_on"
+        else:
+            # Wheel Control
+            currentEmotion = getEmotion(navLeftArm._state.wheel)
+            if not emotion == currentEmotion and isWakeUp:
+                pub.publish(commands[currentEmotion])
+                emotion = currentEmotion
+                print commands[currentEmotion]
+                print currentEmotion
+            continue
+        
+        print "Wait for 0.3 secs "
+        time.sleep(0.3)
+        print "Okay:"
+
+if __name__ == '__main__':
+    try:
+        main()
+    except rospy.ROSInterruptException:
+        pass
\ No newline at end of file
diff --git a/scripts/control_server.py b/scripts/control_server.py
new file mode 100755
index 0000000000000000000000000000000000000000..25bb83cbc4e3626ea630c56cf254e708a85870a2
--- /dev/null
+++ b/scripts/control_server.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+'''
+@Author: Bilgehan NAL
+This python file is a socket server.
+This server listens the messages sent to determined port
+then, publish the given data to determined topic
+'''
+
+import socket
+import rospy
+from std_msgs.msg import String
+import re
+import fcntl
+import struct
+import sys
+import getpass
+import io
+
+commands = {'---': '---'}
+
+
+def main():
+    port = 8080 # Default Port 
+    topic = 'display_chatter' # Default Topic
+    topic, port = externelArgumant(topic, port) # if there is any external topic or port, use them
+
+    rospy.init_node('voice_publisher', anonymous=True)
+    updateDictionary("/home/{}/ros_ws/src/baxter_face/scripts/data/voice_command.txt".format(getpass.getuser()))
+    pub = rospy.Publisher(topic, String, queue_size=40) # Display chatter publisher defined.
+    # Socket variables
+    TCP_IP = ""
+    TCP_PORT = port
+    BUFFER_SIZE = 256  # Normally 1024, but we want fast response
+    # Communication variables display
+    print "ip: {}".format(get_ip_address('eth0'))
+    print "port: {}".format(TCP_PORT)
+    print "topic: {}".format(topic)
+    
+    
+
+    # Socket Creation
+    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    s.bind((TCP_IP, TCP_PORT))
+    s.listen(1)
+    conn, addr = s.accept()
+    socket.setdefaulttimeout(1)
+    print 'Connection address:', addr
+    
+    # Listen message
+    while 1:
+        try:
+            data = conn.recv(BUFFER_SIZE) # message given from client stop the server
+            data = data.decode('latin-1').encode('utf-8')
+            if not data or "exit" in data:  # To stop the server
+                print "Program is closing"
+                break
+            data = data.replace('\0', '')
+            for index in data:
+                print "String: {}".format(ord(index))
+            print u"received data: {}".format(data)
+
+            conn.send("data: {}\n".format(data))  # echo
+            if(isSpeech(data)):
+                pub.publish(process(getSpeechMessage(data)))
+            else:
+                pub.publish(data) # Publish to topic
+        except socket.timeout:
+            print "No data is detected"
+    conn.close()
+
+def isSpeech(msg):
+    msgs = msg.split("*") #this array keeps all variables
+    if len(msgs) > 1:
+        return True
+    else:
+        return False 
+
+def getSpeechMessage(msg):
+    msgs = msg.split("*")
+    if len(msgs) > 1:
+        return msgs[1]
+    else:
+        return  msgs[0] 
+
+
+
+# IP address taken
+def get_ip_address(ifname):
+    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+    return socket.inet_ntoa(fcntl.ioctl(
+        s.fileno(),
+        0x8915,  # SIOCGIFADDR
+        struct.pack('256s', ifname[:15])
+    )[20:24])
+
+# Externel topic or port information control:
+def externelArgumant(topic, port):
+    args = sys.argv
+    length = len(args)
+    '''
+    1 argumant -> nothing
+    2 argumants -> topic
+    3 argumants -> topic and port
+    '''
+    if length == 1:
+        return topic, port
+    elif length == 2:
+        newPort = int(args[1])
+        return topic, newPort
+    elif length == 3:
+        newTopic = args[2]
+        newPort = int(args[1])
+        return newTopic, newPort
+    return topic, port
+
+def updateDictionary(path):
+        #Read data line by line to a list from the txt file.
+        with io.open(path, 'r', encoding='utf-8') as file:
+            my_list = file.readlines()
+        #Seperating waited speech and commands. These two things are seperated by a character of dot(.)
+        for row in my_list:
+            command = row.encode('utf-8').split(".")
+            if len(command) == 2:
+                commands[command[0]] = command[1]
+                print ("Key: {}, Value: {}".format(command[0], command[1]))
+
+def process(string):
+    enc = sys.getdefaultencoding()
+    result = "Speech could not be processed" #Default message
+    string = string.lower()#All cases are converted to lower case
+    # Search the commands in dictionary
+    for key in commands.keys():
+        # if the key is substring of string -> key is our commands.
+        if key in string:
+            result = commands[key]
+            break
+    return result.rstrip().lower().encode('utf_8')
+
+if __name__ == '__main__':
+    try:
+        main()
+    except rospy.ROSInterruptException:
+        pass
+
diff --git a/scripts/control_voice.py b/scripts/control_voice.py
new file mode 100755
index 0000000000000000000000000000000000000000..7b59ff5c1d2bb10e79c852b1f6227322455c4d91
--- /dev/null
+++ b/scripts/control_voice.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+'''
+@Author: Bilgehan NAL
+This file is a publisher node. 
+This node listens the people and provides with to control with voice.
+Google speech recognition api is used to reconite the voice.
+'''
+
+import sys
+import os
+import getpass
+import termios
+import contextlib
+import rospy
+from std_msgs.msg import String
+import Voice_Recogniser
+
+# This function is used for key listener.
+@contextlib.contextmanager
+def raw_mode(file):
+    old_attrs = termios.tcgetattr(file.fileno())
+    new_attrs = old_attrs[:]
+    new_attrs[3] = new_attrs[3] & ~(termios.ECHO | termios.ICANON)
+    try:
+        termios.tcsetattr(file.fileno(), termios.TCSADRAIN, new_attrs)
+        yield
+    finally:
+        termios.tcsetattr(file.fileno(), termios.TCSADRAIN, old_attrs)
+
+def main():
+    rospy.init_node('voice_publisher', anonymous=True)
+    pub = rospy.Publisher('display_chatter', String, queue_size=40) #Display chatter publisher defined.
+    # Recogniser initilazition: parameter is path of txt file of commands
+    recogniser = Voice_Recogniser.Voice_Recogniser("/home/{}/ros_ws/src/baxter_face/scripts/data/voice_command.txt".format(getpass.getuser()))
+    print 'exit with ^C or ESC'
+    # Detecting the push any key on keyboard
+    with raw_mode(sys.stdin):
+        try:
+            while True:
+                ch = sys.stdin.read(1)
+                if not ch or ch == chr(4) or ord(ch) == 27: # Closing program control
+                    break
+                if ord(ch) == 32: #space key detection
+                    print("Baxter is ready to listen you")
+                    command = recogniser.process(recogniser.listen_language(recogniser.TURKEY)) #command
+                    print ("applicated command: {}".format(command))
+                    pub.publish(command) #publishing the command
+        except (KeyboardInterrupt, EOFError):
+            pass
+
+if __name__ == '__main__':
+    try:
+        main()
+    except rospy.ROSInterruptException:
+        pass
\ No newline at end of file
diff --git a/scripts/data/female/baxter_background.png b/scripts/data/female/baxter_background.png
new file mode 100644
index 0000000000000000000000000000000000000000..a42e9bf8c23c1be5205323b3123a8b59215798ff
Binary files /dev/null and b/scripts/data/female/baxter_background.png differ
diff --git a/scripts/data/female/baxter_eye.png b/scripts/data/female/baxter_eye.png
new file mode 100644
index 0000000000000000000000000000000000000000..151f6f1e27e21aa98c29a37083c0903ce7e273e4
Binary files /dev/null and b/scripts/data/female/baxter_eye.png differ
diff --git a/scripts/data/female/baxter_eyelid.png b/scripts/data/female/baxter_eyelid.png
new file mode 100644
index 0000000000000000000000000000000000000000..ede9f81f0f613a1b63494241b9bb90eaec29e0c3
Binary files /dev/null and b/scripts/data/female/baxter_eyelid.png differ
diff --git a/scripts/data/female/eyebrow/baxter_eyebrow_0.png b/scripts/data/female/eyebrow/baxter_eyebrow_0.png
new file mode 100644
index 0000000000000000000000000000000000000000..e6cb9a77bdf5a5515eccb25d874b9b488817a1ab
Binary files /dev/null and b/scripts/data/female/eyebrow/baxter_eyebrow_0.png differ
diff --git a/scripts/data/female/eyebrow/baxter_eyebrow_1.png b/scripts/data/female/eyebrow/baxter_eyebrow_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..b6b789d5466eafd5e9485c402d20e70dab61a4fb
Binary files /dev/null and b/scripts/data/female/eyebrow/baxter_eyebrow_1.png differ
diff --git a/scripts/data/female/eyebrow/baxter_eyebrow_2.png b/scripts/data/female/eyebrow/baxter_eyebrow_2.png
new file mode 100644
index 0000000000000000000000000000000000000000..a2413e899c147e49f30bf9930ad6b7408f837686
Binary files /dev/null and b/scripts/data/female/eyebrow/baxter_eyebrow_2.png differ
diff --git a/scripts/data/female/eyebrow/baxter_eyebrow_3.png b/scripts/data/female/eyebrow/baxter_eyebrow_3.png
new file mode 100644
index 0000000000000000000000000000000000000000..b308ac7ebeebdaa30a84a54376fe7a18d5e55cce
Binary files /dev/null and b/scripts/data/female/eyebrow/baxter_eyebrow_3.png differ
diff --git a/scripts/data/female/eyebrow/baxter_eyebrow_4.png b/scripts/data/female/eyebrow/baxter_eyebrow_4.png
new file mode 100644
index 0000000000000000000000000000000000000000..160297d10e46637f6fb0beeeba431cab9abf61cd
Binary files /dev/null and b/scripts/data/female/eyebrow/baxter_eyebrow_4.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_angry.png b/scripts/data/female/mouth/baxter_mouth_angry.png
new file mode 100644
index 0000000000000000000000000000000000000000..200fa3130b0e90d909fc3059c2cf44ed4628432a
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_angry.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_boring.png b/scripts/data/female/mouth/baxter_mouth_boring.png
new file mode 100644
index 0000000000000000000000000000000000000000..0f443bb0ba4e6c08eb5acae0dd0d7b58971f91c9
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_boring.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_confused.png b/scripts/data/female/mouth/baxter_mouth_confused.png
new file mode 100644
index 0000000000000000000000000000000000000000..c1c47f9c12087c0930737aa25c0f0811c9077ce4
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_confused.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_sad.png b/scripts/data/female/mouth/baxter_mouth_sad.png
new file mode 100644
index 0000000000000000000000000000000000000000..6dce973de3d1d5103b77e490a0e9033bd492e274
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_sad.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_smile.png b/scripts/data/female/mouth/baxter_mouth_smile.png
new file mode 100644
index 0000000000000000000000000000000000000000..0949521c5e3ccd7ab6f3a5de956911fa1b58b720
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_smile.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_smile_open.png b/scripts/data/female/mouth/baxter_mouth_smile_open.png
new file mode 100644
index 0000000000000000000000000000000000000000..bbd862d2ea733f565b444faa597502e3a6233734
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_smile_open.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_smile_tmp.png b/scripts/data/female/mouth/baxter_mouth_smile_tmp.png
new file mode 100644
index 0000000000000000000000000000000000000000..dec5b0bc88f57023b62cc5f05c299800a5fa4307
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_smile_tmp.png differ
diff --git a/scripts/data/female/mouth/baxter_mouth_tooth.png b/scripts/data/female/mouth/baxter_mouth_tooth.png
new file mode 100644
index 0000000000000000000000000000000000000000..90604ee3bd55edab3845430432f847c47dc7cb81
Binary files /dev/null and b/scripts/data/female/mouth/baxter_mouth_tooth.png differ
diff --git a/scripts/data/female/skin/background.png b/scripts/data/female/skin/background.png
new file mode 100644
index 0000000000000000000000000000000000000000..09d3fe544615bffd0c3546c80464ff0e66a50df0
Binary files /dev/null and b/scripts/data/female/skin/background.png differ
diff --git a/scripts/data/female/skin/baxter_skin_0.png b/scripts/data/female/skin/baxter_skin_0.png
new file mode 100644
index 0000000000000000000000000000000000000000..1a4c4d57254bce00448d4129a075e9472ca6a91b
Binary files /dev/null and b/scripts/data/female/skin/baxter_skin_0.png differ
diff --git a/scripts/data/female/skin/baxter_skin_1.png b/scripts/data/female/skin/baxter_skin_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..6074cebb3f89088b1260c54f2eb88ec82d68ab6d
Binary files /dev/null and b/scripts/data/female/skin/baxter_skin_1.png differ
diff --git a/scripts/data/female/skin/baxter_skin_2.png b/scripts/data/female/skin/baxter_skin_2.png
new file mode 100644
index 0000000000000000000000000000000000000000..7fabf52eb7dbec507a5daef725c7e42392575d31
Binary files /dev/null and b/scripts/data/female/skin/baxter_skin_2.png differ
diff --git a/scripts/data/female/skin/baxter_skin_3.png b/scripts/data/female/skin/baxter_skin_3.png
new file mode 100644
index 0000000000000000000000000000000000000000..fa3107263c3dfc2338fa61464ad0b25c140d8827
Binary files /dev/null and b/scripts/data/female/skin/baxter_skin_3.png differ
diff --git a/scripts/data/female/skin/baxter_skin_4.png b/scripts/data/female/skin/baxter_skin_4.png
new file mode 100644
index 0000000000000000000000000000000000000000..832bedf6b439e2756b9843342025f8cbf593c701
Binary files /dev/null and b/scripts/data/female/skin/baxter_skin_4.png differ
diff --git a/scripts/data/female/skin/baxter_skin_5.png b/scripts/data/female/skin/baxter_skin_5.png
new file mode 100644
index 0000000000000000000000000000000000000000..70903ad9bbe51d8c3f9223e09d437e86c84c9e55
Binary files /dev/null and b/scripts/data/female/skin/baxter_skin_5.png differ
diff --git a/scripts/data/female/voice_command.txt b/scripts/data/female/voice_command.txt
new file mode 100644
index 0000000000000000000000000000000000000000..716323c30b28c0bf2bc8dcdecbcae5b00cbe1814
--- /dev/null
+++ b/scripts/data/female/voice_command.txt
@@ -0,0 +1,106 @@
+#display_chatter -> name of the topic as String
+#left side is waited word(s) from user
+#right side is message data to publish
+
+klasik.default
+
+mutlu.happy
+sevin.happy
+gül.happy
+gul.happy
+akıllı.happy
+akilli.happy
+
+üzül.sad
+uzul.sad
+üzgün.sad
+uzgun.sad
+kötü.sad
+kotu.sad
+
+
+kızgın.angry
+kizgin.angry
+öfke.angry
+ofke.angry
+sinir.angry
+
+şaşır.confused
+sasir.confused
+sürpriz.confused
+surpriz.confused
+
+kork.panic
+panik.panic
+
+sıkıl.bored
+sikil.bored
+sıkıcı.bored
+sikici.bored
+
+kurnaz.crafty
+
+uyu.sleep
+uyku.sleep
+görüşürüz.sleep
+gorusuruz.sleep
+
+uyan.wake_up
+kalk.wake_up
+
+normal sol kol.left_arm_follow_on
+normal sol el.left_arm_follow_on
+normal solkol.left_arm_follow_on
+normal solel.left_arm_follow_on
+
+normal sağ kol.right_arm_follow_on
+normal sağ el.right_arm_follow_on
+normal sağkol.right_arm_follow_on
+normal sağel.right_arm_follow_on
+normal sag kol.right_arm_follow_on
+normal sag el.right_arm_follow_on
+normal sagkol.right_arm_follow_on
+normal sagel.right_arm_follow_on
+
+dinamik sol kol.dynamic_left_arm_follow_on
+dinamik sol el.dynamic_left_arm_follow_on
+dinamik solkol.dynamic_left_arm_follow_on
+dinamik solel.dynamic_left_arm_follow_on
+
+dinamik sağ kol.dynamic_right_arm_follow_on
+dinamik sağ el.dynamic_right_arm_follow_on
+dinamik sağkol.dynamic_right_arm_follow_on
+dinamik sağel.dynamic_right_arm_follow_on
+dinamik sag kol.dynamic_right_arm_follow_on
+dinamik sag el.dynamic_right_arm_follow_on
+dinamik sagkol.dynamic_right_arm_follow_on
+dinamik sagel.dynamic_right_arm_follow_on
+
+takibi bırak.arm_follow_off
+takibi birak.arm_follow_off
+
+kapan.exit
+kapat.exit
+
+motor.enable
+hareketlen.enable
+
+kırmızı alarm.disable
+kirmizi alarm.disable
+
+sağa bak.dynamic_look_-65_0
+sağ tarafa bak.dynamic_look_-65_0
+saga bak.dynamic_look_-65_0
+sag tarafa bak.dynamic_look_-65_0
+sola bak.dynamic_look_65_0
+sol tarafa bak.dynamic_look_65_0
+öne bak.dynamic_look_0_0
+önüne bak.dynamic_look_0_0
+ön tarafa bak.dynamic_look_0_0
+one bak.dynamic_look_0_0
+onune bak.dynamic_look_0_0
+on tarafa bak.dynamic_look_0_0
+
+beni takip et.human_follow_on
+bana bakma.human_follow_off
+
diff --git a/scripts/data/male/baxter_background.png b/scripts/data/male/baxter_background.png
new file mode 100644
index 0000000000000000000000000000000000000000..a42e9bf8c23c1be5205323b3123a8b59215798ff
Binary files /dev/null and b/scripts/data/male/baxter_background.png differ
diff --git a/scripts/data/male/baxter_eye.png b/scripts/data/male/baxter_eye.png
new file mode 100644
index 0000000000000000000000000000000000000000..3dcb6f368d4da697fe310b1169aa5fc050d49b00
Binary files /dev/null and b/scripts/data/male/baxter_eye.png differ
diff --git a/scripts/data/male/baxter_eye_2.png b/scripts/data/male/baxter_eye_2.png
new file mode 100644
index 0000000000000000000000000000000000000000..151f6f1e27e21aa98c29a37083c0903ce7e273e4
Binary files /dev/null and b/scripts/data/male/baxter_eye_2.png differ
diff --git a/scripts/data/male/baxter_eyelid.png b/scripts/data/male/baxter_eyelid.png
new file mode 100644
index 0000000000000000000000000000000000000000..fa2c0fa5a7906ac9baabfff1c90ac3669934473a
Binary files /dev/null and b/scripts/data/male/baxter_eyelid.png differ
diff --git a/scripts/data/male/eyebrow/baxter_eyebrow_0.png b/scripts/data/male/eyebrow/baxter_eyebrow_0.png
new file mode 100644
index 0000000000000000000000000000000000000000..82cd3564cae6135bd294982164e3c3ad250bb03a
Binary files /dev/null and b/scripts/data/male/eyebrow/baxter_eyebrow_0.png differ
diff --git a/scripts/data/male/eyebrow/baxter_eyebrow_1.png b/scripts/data/male/eyebrow/baxter_eyebrow_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..0d54811bfaca1487ae87651fa173358c6175f0a2
Binary files /dev/null and b/scripts/data/male/eyebrow/baxter_eyebrow_1.png differ
diff --git a/scripts/data/male/eyebrow/baxter_eyebrow_2.png b/scripts/data/male/eyebrow/baxter_eyebrow_2.png
new file mode 100644
index 0000000000000000000000000000000000000000..a2413e899c147e49f30bf9930ad6b7408f837686
Binary files /dev/null and b/scripts/data/male/eyebrow/baxter_eyebrow_2.png differ
diff --git a/scripts/data/male/eyebrow/baxter_eyebrow_3.png b/scripts/data/male/eyebrow/baxter_eyebrow_3.png
new file mode 100644
index 0000000000000000000000000000000000000000..94df4145e4aae0a6867dc110d01c98bcc8fba3d1
Binary files /dev/null and b/scripts/data/male/eyebrow/baxter_eyebrow_3.png differ
diff --git a/scripts/data/male/eyebrow/baxter_eyebrow_4.png b/scripts/data/male/eyebrow/baxter_eyebrow_4.png
new file mode 100644
index 0000000000000000000000000000000000000000..6c1b6be28a00df531894de232ea2706c564f917a
Binary files /dev/null and b/scripts/data/male/eyebrow/baxter_eyebrow_4.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_angry.png b/scripts/data/male/mouth/baxter_mouth_angry.png
new file mode 100644
index 0000000000000000000000000000000000000000..200fa3130b0e90d909fc3059c2cf44ed4628432a
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_angry.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_boring.png b/scripts/data/male/mouth/baxter_mouth_boring.png
new file mode 100644
index 0000000000000000000000000000000000000000..0f443bb0ba4e6c08eb5acae0dd0d7b58971f91c9
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_boring.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_boring_1.png b/scripts/data/male/mouth/baxter_mouth_boring_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..dcf16855f9165f8124b914ff1e4b1f856d3607e2
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_boring_1.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_confused.png b/scripts/data/male/mouth/baxter_mouth_confused.png
new file mode 100644
index 0000000000000000000000000000000000000000..c1c47f9c12087c0930737aa25c0f0811c9077ce4
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_confused.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_neutral.png b/scripts/data/male/mouth/baxter_mouth_neutral.png
new file mode 100644
index 0000000000000000000000000000000000000000..a7aca64a15df18d322a0fed46f50881789357576
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_neutral.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_neutral_1.png b/scripts/data/male/mouth/baxter_mouth_neutral_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..e48faee2e95d0fee2243e9445078c19a06543443
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_neutral_1.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_sad.png b/scripts/data/male/mouth/baxter_mouth_sad.png
new file mode 100644
index 0000000000000000000000000000000000000000..6dce973de3d1d5103b77e490a0e9033bd492e274
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_sad.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_sad_1.png b/scripts/data/male/mouth/baxter_mouth_sad_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..1b544c2254dd462d73d3c1d4337a4161910177e8
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_sad_1.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_smile_open.png b/scripts/data/male/mouth/baxter_mouth_smile_open.png
new file mode 100644
index 0000000000000000000000000000000000000000..bbd862d2ea733f565b444faa597502e3a6233734
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_smile_open.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_smile_open_1.png b/scripts/data/male/mouth/baxter_mouth_smile_open_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..efb64663ebb3c02f0d74131dbc02cb65dd3ebe55
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_smile_open_1.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_smile_tmp.png b/scripts/data/male/mouth/baxter_mouth_smile_tmp.png
new file mode 100644
index 0000000000000000000000000000000000000000..dec5b0bc88f57023b62cc5f05c299800a5fa4307
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_smile_tmp.png differ
diff --git a/scripts/data/male/mouth/baxter_mouth_tooth.png b/scripts/data/male/mouth/baxter_mouth_tooth.png
new file mode 100644
index 0000000000000000000000000000000000000000..90604ee3bd55edab3845430432f847c47dc7cb81
Binary files /dev/null and b/scripts/data/male/mouth/baxter_mouth_tooth.png differ
diff --git a/scripts/data/male/skin/background.png b/scripts/data/male/skin/background.png
new file mode 100644
index 0000000000000000000000000000000000000000..09d3fe544615bffd0c3546c80464ff0e66a50df0
Binary files /dev/null and b/scripts/data/male/skin/background.png differ
diff --git a/scripts/data/male/skin/baxter_skin_0 (copy).png b/scripts/data/male/skin/baxter_skin_0 (copy).png
new file mode 100644
index 0000000000000000000000000000000000000000..1a4c4d57254bce00448d4129a075e9472ca6a91b
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_0 (copy).png differ
diff --git a/scripts/data/male/skin/baxter_skin_0.png b/scripts/data/male/skin/baxter_skin_0.png
new file mode 100644
index 0000000000000000000000000000000000000000..1a4c4d57254bce00448d4129a075e9472ca6a91b
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_0.png differ
diff --git a/scripts/data/male/skin/baxter_skin_0_1.png b/scripts/data/male/skin/baxter_skin_0_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..f4946e8572e67ee672cf0ba1a73787b55146a5f2
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_0_1.png differ
diff --git a/scripts/data/male/skin/baxter_skin_1.png b/scripts/data/male/skin/baxter_skin_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..6074cebb3f89088b1260c54f2eb88ec82d68ab6d
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_1.png differ
diff --git a/scripts/data/male/skin/baxter_skin_2.png b/scripts/data/male/skin/baxter_skin_2.png
new file mode 100644
index 0000000000000000000000000000000000000000..7fabf52eb7dbec507a5daef725c7e42392575d31
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_2.png differ
diff --git a/scripts/data/male/skin/baxter_skin_3.png b/scripts/data/male/skin/baxter_skin_3.png
new file mode 100644
index 0000000000000000000000000000000000000000..fa3107263c3dfc2338fa61464ad0b25c140d8827
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_3.png differ
diff --git a/scripts/data/male/skin/baxter_skin_4.png b/scripts/data/male/skin/baxter_skin_4.png
new file mode 100644
index 0000000000000000000000000000000000000000..832bedf6b439e2756b9843342025f8cbf593c701
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_4.png differ
diff --git a/scripts/data/male/skin/baxter_skin_5.png b/scripts/data/male/skin/baxter_skin_5.png
new file mode 100644
index 0000000000000000000000000000000000000000..70903ad9bbe51d8c3f9223e09d437e86c84c9e55
Binary files /dev/null and b/scripts/data/male/skin/baxter_skin_5.png differ
diff --git a/scripts/data/male/voice_command.txt b/scripts/data/male/voice_command.txt
new file mode 100644
index 0000000000000000000000000000000000000000..716323c30b28c0bf2bc8dcdecbcae5b00cbe1814
--- /dev/null
+++ b/scripts/data/male/voice_command.txt
@@ -0,0 +1,106 @@
+#display_chatter -> name of the topic as String
+#left side is waited word(s) from user
+#right side is message data to publish
+
+klasik.default
+
+mutlu.happy
+sevin.happy
+gül.happy
+gul.happy
+akıllı.happy
+akilli.happy
+
+üzül.sad
+uzul.sad
+üzgün.sad
+uzgun.sad
+kötü.sad
+kotu.sad
+
+
+kızgın.angry
+kizgin.angry
+öfke.angry
+ofke.angry
+sinir.angry
+
+şaşır.confused
+sasir.confused
+sürpriz.confused
+surpriz.confused
+
+kork.panic
+panik.panic
+
+sıkıl.bored
+sikil.bored
+sıkıcı.bored
+sikici.bored
+
+kurnaz.crafty
+
+uyu.sleep
+uyku.sleep
+görüşürüz.sleep
+gorusuruz.sleep
+
+uyan.wake_up
+kalk.wake_up
+
+normal sol kol.left_arm_follow_on
+normal sol el.left_arm_follow_on
+normal solkol.left_arm_follow_on
+normal solel.left_arm_follow_on
+
+normal sağ kol.right_arm_follow_on
+normal sağ el.right_arm_follow_on
+normal sağkol.right_arm_follow_on
+normal sağel.right_arm_follow_on
+normal sag kol.right_arm_follow_on
+normal sag el.right_arm_follow_on
+normal sagkol.right_arm_follow_on
+normal sagel.right_arm_follow_on
+
+dinamik sol kol.dynamic_left_arm_follow_on
+dinamik sol el.dynamic_left_arm_follow_on
+dinamik solkol.dynamic_left_arm_follow_on
+dinamik solel.dynamic_left_arm_follow_on
+
+dinamik sağ kol.dynamic_right_arm_follow_on
+dinamik sağ el.dynamic_right_arm_follow_on
+dinamik sağkol.dynamic_right_arm_follow_on
+dinamik sağel.dynamic_right_arm_follow_on
+dinamik sag kol.dynamic_right_arm_follow_on
+dinamik sag el.dynamic_right_arm_follow_on
+dinamik sagkol.dynamic_right_arm_follow_on
+dinamik sagel.dynamic_right_arm_follow_on
+
+takibi bırak.arm_follow_off
+takibi birak.arm_follow_off
+
+kapan.exit
+kapat.exit
+
+motor.enable
+hareketlen.enable
+
+kırmızı alarm.disable
+kirmizi alarm.disable
+
+sağa bak.dynamic_look_-65_0
+sağ tarafa bak.dynamic_look_-65_0
+saga bak.dynamic_look_-65_0
+sag tarafa bak.dynamic_look_-65_0
+sola bak.dynamic_look_65_0
+sol tarafa bak.dynamic_look_65_0
+öne bak.dynamic_look_0_0
+önüne bak.dynamic_look_0_0
+ön tarafa bak.dynamic_look_0_0
+one bak.dynamic_look_0_0
+onune bak.dynamic_look_0_0
+on tarafa bak.dynamic_look_0_0
+
+beni takip et.human_follow_on
+bana bakma.human_follow_off
+
diff --git a/scripts/data/mustache.jpeg b/scripts/data/mustache.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..a153359c3e30ba48db0ec823a7dde57f8a9e87cf
Binary files /dev/null and b/scripts/data/mustache.jpeg differ
diff --git a/scripts/head_wobbler.py b/scripts/head_wobbler.py
new file mode 100755
index 0000000000000000000000000000000000000000..ee3d7f2b10658bcbd0557fef55934881a409ad35
--- /dev/null
+++ b/scripts/head_wobbler.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+
+"""
+@Author: Bilgehan NAL
+This Wobbler class is for the moving of the had joint.
+"""
+
+
+import argparse
+import rospy
+import baxter_interface
+from baxter_interface import CHECK_VERSION
+
+
+class Wobbler(object):
+
+    def __init__(self):
+        """
+        'Wobbles' the head
+        """
+        self._done = False
+        self._head = baxter_interface.Head()
+        self.tolerance = baxter_interface.HEAD_PAN_ANGLE_TOLERANCE
+        self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
+        print "Wobbler is initilized"
+
+    """ enable robot """
+
+    def enable(self):
+        # verify robot is enabled
+        print("Getting robot state... ")
+        self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
+        self._init_state = self._rs.state().enabled
+        if not self._rs.state().enabled:
+            print("Enabling robot... ")
+            self._rs.enable()
+    
+    def disable(self):
+        """
+        Exits example cleanly by moving head to neutral position and
+        maintaining start state
+        """
+       
+        print("\nExiting example...")
+        if self._done:
+            self.set_neutral()
+        if self._rs.state().enabled:
+            print("Disabling robot...")
+            self._rs.disable()
+
+    def set_neutral(self):
+        # Sets the head back into a neutral pose
+        self._head.set_pan(0.0)
+
+    def wobbleSlow(self, angle) :
+        # Moves the head to a given position at a given speed
+        print baxter_interface.HEAD_PAN_ANGLE_TOLERANCE
+        if angle > 1.5 :
+            angle = 1.5
+        elif angle < -1.5 :
+            angle = -1.5
+       
+        currentAngle = currentAngle = self.getPosition()
+        sign = 0
+        
+        if angle > currentAngle:
+            sign = 1
+        else:
+            sign = -1
+
+        control_rate = rospy.Rate(100)
+        
+        while not abs(angle - currentAngle) <= 0.1:
+            currentAngle = currentAngle + sign*0.07
+            #print "calculated angle: {}".format(currentAngle)
+            self._head.set_pan(currentAngle, speed=0.3, timeout=0)
+            #self._head.set_pan(currentAngle)
+            control_rate.sleep()
+            currentAngle = self.getPosition()
+           # print "current: {}, sign: {}".format(currentAngle, sign)     
+        
+        
+
+    def wobble(self, angle) :
+        # Moves the head to a given position
+        if angle > 1.5 :
+            angle = 1.5
+        elif angle < -1.5 :
+            angle = -1.5
+        if self._rs.state().enabled:
+            print "Wobbling is apllicating"
+            self._head.set_pan(angle)
+
+    def getPosition(self):
+        # get the angle of the baxter's head's in the current time.
+        return self._head.pan()
+
+    def isEnable(self):
+        self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
+        return self._rs.state().enabled
+        
diff --git a/scripts/head_wobbler.pyc b/scripts/head_wobbler.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..975c46518d5effb212a2f90c0085200605d8a132
Binary files /dev/null and b/scripts/head_wobbler.pyc differ
diff --git a/scripts/screen_listener.py b/scripts/screen_listener.py
new file mode 100755
index 0000000000000000000000000000000000000000..a2a673fa8ade71d6a027fda8e6ebb88ddf283cb3
--- /dev/null
+++ b/scripts/screen_listener.py
@@ -0,0 +1,506 @@
+#!/usr/bin/env python
+
+'''
+@Author: Bilgehan NAL
+This file is a subscriber node which listens a topic type:String
+Waited String messages:
+Emotions:
+    -> "default"
+    -> "happy"
+    -> "sad"
+    -> "angry"
+    -> "confused"
+    -> "panic"
+    -> "bored"
+    -> "crafty"
+Actions:
+    -> "look_<x>_<y>"
+    -> "look_<x>_<y>_<time>"
+    -> "skin_<number>"
+    -> "human_follow_on"
+    -> "human_follow_off"
+    -> "arm_follow_off"
+    -> "left_arm_follow_on"
+    -> "right_arm_follow_on"
+    -> "dynamic_look_<x>_<y>"
+    -> "dynamic_human_follow_on"
+    -> "wake_up"
+    -> "sleep"
+Wobbling:
+    -> "enable"
+    -> "disable" 
+    -> "wobble_<angle>" angle should be between[-1.5, 1.5]
+Other:
+    -> "exit"
+    -> "wake_up"
+    -> "sleep"
+
+
+'''
+
+import os
+import sys
+import rospy
+import timeit
+import cv2
+import cv_bridge
+import Face
+from sensor_msgs.msg import Image, PointCloud
+from std_msgs.msg import String
+import threading
+#import head_wobbler
+#from baxter_core_msgs.msg import EndpointState
+import math
+
+""" Variable Decleration """
+
+wobbler = None
+
+face = Face.Face()
+humanFollowControl = False # if this variable will be true, baxter follows the humans 
+dynamicControl = False
+armFollowControl = False # if this variable will be true, baxter follows its determined arm
+isItLeftArm = True
+defaultMsg = "This message is for controlling the msgs (is it the same with previous one?)"
+
+# helpers: handle the noise while following the arm
+xAxisLeft = 0
+yAxisLeft = 0
+xAxisRight = 0
+yAxisRight = 0
+c = 0.75 #This variable keeps the distance between origin and head
+
+# helpers: handle the noise while following the human
+elementOfHandleList = 0
+coor = 0
+isSystemRun = True
+oldCoor = 0
+xOld = 0
+pKOld = 1
+sizeOfHandleList = 35
+handleList = [0]*35
+
+def isInAvailablePercentage(minimum, current, percentage):
+    rangeOfPercentage = percentage / 100.0
+    if abs(current-minimum) < (minimum * rangeOfPercentage):
+        return True
+    else:
+        return False
+
+# publish image is a function which displays the image given with parameter. Image Type: Numpy array
+def publish_image(img):
+    msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding="rgba8")
+    pub = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)
+    pub.publish(msg)
+
+# Statistical Functions
+def mode(numbers) :
+    largestCount = 0
+    modes = []
+    for x in numbers:
+        if x in modes:
+            continue
+        count = numbers.count(x)
+        if count > largestCount:
+            del modes[:]
+            modes.append(x)
+            largestCount = count
+        elif count == largestCount:
+            modes.append(x)
+    return modes[0]
+
+def stddev(lst):
+    mean = float(sum(lst)) / len(lst)
+    return (float(reduce(lambda x, y: x + y, map(lambda x: (x - mean) ** 2, lst))) / len(lst))**0.5
+
+def mean(listX) :
+    return sum(listX) / len(listX)
+
+def eliminateOutliers(listOfData, limit) :
+    stdDeviation = stddev(listOfData)
+    meanOfList = mean(listOfData)
+
+    for element in listOfData :
+        if stdDeviation != 0 :
+            zScore = abs(element - meanOfList) / stdDeviation
+            if zScore > limit :
+                del element
+    return listOfData
+
+""" Callback Functions """
+
+def callback_Command(data):
+    global isSystemRun
+    global defaultMsg
+    global humanFollowControl
+    global armFollowControl
+    global isItLeftArm
+    global dynamicControl
+    msg = data.data.lower() # All letters are lowercased for comparison.
+    print "recieved msg is : {}".format(msg)
+    msgs = msg.split("_") #this array keeps all variables
+   
+    # Messages and actions
+
+    if len(msgs) == 1 and msg != defaultMsg:
+        
+        if msgs[0] == "default" :
+            face.emotion_default(cv2, publish_image)
+            print "Default Emotion is applicated"
+
+        if msgs[0] == "happy" :
+            face.emotion_happy(cv2, publish_image)
+            print "Emotion happy is applicated"
+            
+        elif msgs[0] == "angry" :
+            face.emotion_angry(cv2, publish_image)
+            print "Emotion angry is applicated"
+
+        elif msgs[0] == "confused" :
+            face.emotion_confused(cv2, publish_image)
+            print "Emotion confused is applicated"
+
+        elif msgs[0] == "sad" :
+            face.emotion_sad(cv2, publish_image)
+            print "Emotion sad is applicated"
+
+        elif msgs[0] == "panic" :
+            face.emotion_panic(cv2, publish_image)
+            print "Emotion panic is applicated"
+
+        elif msgs[0] == "bored" :
+            face.emotion_bored(cv2, publish_image)
+            print "Emotion bored is applicated"
+
+        elif msgs[0] == "crafty" :
+            face.emotion_crafty(cv2, publish_image)
+            print "Emotion crafty is applicated"
+
+        elif msgs[0] == "neutral":
+            face.emotion_default(cv2, publish_image)
+            print "Emotion neutral is applicated"
+
+        elif msgs[0] == "exit" :
+            print "Program is closing..."
+            face.sleep(cv2, publish_image)
+            rospy.sleep(1)
+            print "Program is closed"
+            isSystemRun = False
+            sys.exit()
+
+  #      elif msgs[0] == "enable" :
+  #          wobbler.enable()
+  #          wobbler.wobble(0.0)
+  #          print "Wobbler is enabled and wobbled to 0.0"
+
+   #     elif msgs[0] == "disable" :
+   #         wobbler.disable()
+   #         print "Wobbler is disabled"
+
+        elif msgs[0] == "sleep" :
+            face.sleep(cv2, publish_image)
+            print "Sst! Baxter is sleeping right now"
+            
+        defaultMsg = msg
+    
+    elif len(msgs) == 2 and msg != defaultMsg :
+        
+        if msgs[0] == "skin" :
+            numberOfSkin = int(msgs[1]) 
+            face.skin.setSkin(numberOfSkin)
+            face.show(publish_image)
+        
+ #       elif msgs[0] == "wobble" :
+ #           angle = float(msgs[1])
+ #           wobbler.wobble(angle)
+ #           print "Wobbling is applicated"
+
+        elif msgs[0] == "wake" and msgs[1] == "up" :
+            face.wakeUp(cv2, publish_image)
+            print "Baxter woke up"
+
+        defaultMsg = msg
+
+    elif len(msgs) == 3 and msg != defaultMsg :
+        if msgs[0] == "look" :
+            x = int(msgs[1])
+            y = int(msgs[2])
+            face.lookWithMotion(cv2, x, y, 0.5, publish_image)
+
+        elif msgs[0] == "human" and msgs[1] == "follow" and msgs[2] == "on" :
+            face.lookWithMotion(cv2, 0, 0, 0.5, publish_image)
+            humanFollowControl = True
+            armFollowControl = False
+            dynamicControl = False
+ #           wobbler.enable()
+ #           wobbler.wobble(0.0)
+            print "Human following mod on"
+
+        elif msgs[0] == "human" and msgs[1] == "follow" and msgs[2] == "off" :
+            humanFollowControl = False
+            dynamicControl = False
+            print "Human following mod off"
+            face.lookWithMotion(cv2, 0, 0, 0.5, publish_image)
+            dynamicControl = False
+  #          wobbler.enable()
+  #          wobbler.wobble(0.0)
+
+        elif msgs[0] == "arm" and msgs[1] == "follow" and msgs[2] == "off" :
+            armFollowControl = False
+            print "Arm following mod off"
+            face.lookWithMotion(cv2, 0, 0, 0.5, publish_image)
+            dynamicControl = False
+            wobbler.enable()
+            wobbler.wobble(0.0)
+        defaultMsg = msg
+    
+    elif len(msgs) == 4 and msg != defaultMsg :
+        if msgs[0] == "look" :
+            x = int(msgs[1])
+            y = int(msgs[2])
+            second = float(msgs[3])
+            face.lookWithMotion(cv2, x, y, second, publish_image)
+
+        elif msgs[0] == "dynamic" and msgs[1] == "look" :
+            x = int(msgs[2])
+            y = int(msgs[3])
+            face.lookWithMotionDynamic(cv2, x, y, 0.5, publish_image, wobbler)
+
+        elif msgs[0] == "dynamic" and msgs[1] == "human" and msgs[2] == "follow" and msgs[3] == "on" :
+            humanFollowControl = True
+            armFollowControl = False
+            dynamicControl = True
+            print "Human following mod on"
+
+        elif msgs[0] == "left" and msgs[1] == "arm" and msgs[2] == "follow" and msgs[3] == "on" :
+            humanFollowControl = False
+            armFollowControl = True
+            isItLeftArm = True
+            dynamicControl = False
+            wobbler.enable()
+            wobbler.wobble(0.0)
+            print "Left arm following mod on"
+
+        elif msgs[0] == "right" and msgs[1] == "arm" and msgs[2] == "follow" and msgs[3] == "on" :
+            humanFollowControl = False
+            armFollowControl = True
+            isItLeftArm = False
+            dynamicControl = False
+            wobbler.enable()
+            wobbler.wobble(0.0)
+            print "Right arm following mod on"
+        defaultMsg = msg
+    
+    elif len(msgs) == 5 and msg != defaultMsg :
+        if msgs[0] == "dynamic" and msgs[1] == "right" and msgs[2] == "arm" and msgs[3] == "follow" and msgs[4] == "on" :
+            humanFollowControl = False
+            armFollowControl = True
+            isItLeftArm = False
+            dynamicControl = True
+            print "Dynamic right arm following mod on"
+
+        if msgs[0] == "dynamic" and msgs[1] == "left" and msgs[2] == "arm" and msgs[3] == "follow" and msgs[4] == "on" :
+            humanFollowControl = False
+            armFollowControl = True
+            isItLeftArm = True
+            dynamicControl = True
+            print "Dynamic left arm following mod on"
+        defaultMsg = msg
+
+
+# this function for the human following
+def callback_human_follow(msg):
+
+    global xOld 
+    global pKOld 
+    global coor
+    global elementOfHandleList
+    global oldCoor
+    sonarIDs = msg.channels[0].values 
+    sonarDistances = msg.channels[1].values
+    # r is the standard deviation of the sonar sensors' values.
+    r = 0.50635561 
+
+    # arrayOfSonarID holds the sensor values that should be processed
+    arrayOfSonarID = humanFollowNoiseElimination(sonarIDs, sonarDistances) 
+    numberOfData = len(arrayOfSonarID)
+    
+    if numberOfData > 0:
+        meanOfSonarID = mean(arrayOfSonarID)
+        # Kalman Filter Part
+        K = pKOld / (pKOld + r) 
+        x = xOld + K*(meanOfSonarID-xOld)
+        pK = (1-K) * pKOld
+        prob = 0.03 # Prob value determines that how much measured value effect the kalman filter value.
+        x = (x * (1-prob)) + meanOfSonarID*prob # Result of the kalman filter
+
+        # Meaning of the last 35 value
+        handleList[elementOfHandleList] = x
+        elementOfHandleList += 1
+        elementOfHandleList %= sizeOfHandleList
+
+        # Output of the value
+        xOld = x
+        pKOld = pK
+        
+        value = int(mean(handleList) * 26.67)
+        oldCoor = coor
+        coor = value #Coor is the coordinate of the object according to robot's eye
+        #print "Coor: {}, SumOfSensors: {}".format(coor, sum(arrayOfSonarID))
+
+def humanFollowNoiseElimination(sonarIDs, sonarDistances) :
+    arrayOfSonarID = []
+    numberOfData = len(sonarIDs)
+    counter = 0
+    minimumIndex = 0
+    maximumDistance = 2 # maximum distance as a unit of meter
+    percentageRate = 30 # to understand the object in front of it
+
+    # determine the minimum index
+    for index in range(numberOfData):
+        if (sonarIDs[index] <= 3 and sonarIDs[index] >= 0) or (sonarIDs[index] >= 9 and sonarIDs[index] <= 11):
+            if sonarDistances[index] < sonarDistances[minimumIndex]:
+                minimumIndex = index
+    
+    # Determining the values will be proccesed
+    for index in range(numberOfData):
+        if sonarIDs[index] <= 3 and sonarIDs[index] >= 0:
+            if sonarDistances[index] < maximumDistance and isInAvailablePercentage(sonarDistances[minimumIndex], sonarDistances[index], percentageRate):
+                levelOfSonar = float(sonarIDs[index])*(-1) # resizing the value between [-3, 3]
+                arrayOfSonarID.append(levelOfSonar)
+                counter += 1
+                continue
+        
+        elif sonarIDs[index] >= 9 and sonarIDs[index] <= 11:
+            if sonarDistances[index] < maximumDistance and isInAvailablePercentage(sonarDistances[minimumIndex], sonarDistances[index], percentageRate):
+                levelOfSonar = (12-float(sonarIDs[index])) # resizing the value between [-3, 3]
+                arrayOfSonarID.append(levelOfSonar)
+                counter += 1
+                continue
+
+    # Eliminate the outliers
+    if counter > 0 :
+        arrayOfSonarID = eliminateOutliers(arrayOfSonarID, 1.3)
+    
+    return arrayOfSonarID
+
+def callback_left_arm_follow(msg) :
+    global xAxisLeft
+    global yAxisLeft
+    # taken the coordinates
+    x = msg.pose.position.x
+    y = msg.pose.position.y
+    z = msg.pose.position.z
+
+    # angle calculation of y axis
+    yAxisAngle = math.atan(abs(x)/(z-c)) 
+    if yAxisAngle < 0:
+        yAxisAngle = (-3.14/2)-yAxisAngle
+    else:
+        yAxisAngle = (3.14/2)-yAxisAngle
+    if isItLeftArm == True :
+        yAxisLeft = (-76.394) * ( yAxisAngle )
+        xAxisLeft = (57.294) * ( math.atan(y/abs(x)) )
+
+def callback_right_arm_follow(msg):
+    global xAxisRight
+    global yAxisRight
+    # taken the coordinates
+    x = msg.pose.position.x
+    y = msg.pose.position.y
+    z = msg.pose.position.z
+
+    # angle calculation of y axis
+    yAxisAngle = math.atan(abs(x)/(z-c)) 
+    if yAxisAngle < 0:
+        yAxisAngle = (-3.14/2)-yAxisAngle
+    else:
+        yAxisAngle = (3.14/2)-yAxisAngle
+
+    if isItLeftArm == False :
+        yAxisRight = (-76.394) * ( yAxisAngle )
+        xAxisRight = (57.294) * ( math.atan(y/abs(x)) )
+
+    """ Main Functions """
+
+def main():
+    global wobbler
+    print "entered main part..."
+#    wobbler = head_wobbler.Wobbler()
+    #face.testAllImages(cv2, publish_image)
+    face.sleep(cv2, publish_image)
+#    rospy.Subscriber('/robot/sonar/head_sonar/state', PointCloud, callback_human_follow)
+#    rospy.Subscriber('/robot/limb/left/endpoint_state', EndpointState, callback_left_arm_follow)
+#    rospy.Subscriber('/robot/limb/right/endpoint_state', EndpointState, callback_right_arm_follow)
+    rospy.Subscriber('display_chatter', String, callback_Command)
+    rospy.spin()
+    return 0
+
+def main_loop() :
+    global isSystemRun
+    rospy.sleep(2)
+    rate = rospy.Rate(10) #10 times in a second (loop frequency)
+    #These time keepers for the eyelid
+    referenceTime = timeit.default_timer()
+    currentTime = timeit.default_timer()
+    print "entered main loop part..."
+
+    while not rospy.is_shutdown() :
+        
+        # Blink for each 5 seconds.
+        currentTime = timeit.default_timer()
+        if currentTime - referenceTime > 5:
+            face.wink(cv2, publish_image)
+            referenceTime = timeit.default_timer()
+            print "wink motion is applicated"
+
+        if humanFollowControl == True :
+            if oldCoor != face.eye.getPositionX():
+                if dynamicControl == False :
+                    face.eye.lookExactCoordinate(coor, 0)
+                    face.show(publish_image)
+                else : 
+                    face.lookExactCoordinateDynamic(cv2, coor, 0, publish_image, wobbler)
+                    face.show(publish_image)
+
+        elif armFollowControl == True :
+            if dynamicControl == False :
+                if isItLeftArm:
+                    face.eye.lookExactCoordinate(int(xAxisLeft), int(yAxisLeft))
+                else:
+                    face.eye.lookExactCoordinate(int(xAxisRight), int(yAxisRight))
+            else:
+                if isItLeftArm:
+                    face.lookExactCoordinateDynamic(int(xAxisLeft), int(yAxisLeft), publish_image, wobbler)
+                else:
+                    face.lookExactCoordinateDynamic(int(xAxisRight), int(yAxisRight), publish_image, wobbler)
+            face.show(publish_image)
+
+        if isSystemRun == False :
+            sys.exit()
+    
+    face.show(publish_image)
+    isSystemRun = False
+
+if __name__ == '__main__' :
+
+    rospy.init_node('rsdk_xdisplay_image', anonymous=True)
+    
+    threadMain = threading.Thread(name='listener', target=main)
+    threadMainLoop = threading.Thread(name='main_loop', target=main_loop)
+
+    try:
+        threadMain.daemon = True
+        threadMainLoop.daemon = True
+        threadMainLoop.start()
+        threadMain.start()
+    except (KeyboardInterrupt, SystemExit):
+        cleanup_stop_thread()
+        sys.exit()
+
+    except :
+        print "Unable to start thread"
+    while 1 :
+        if isSystemRun == False :
+            break
+        pass